def _update_unfit_groups_with_crossgroup_dist(dist_metrics, fit_group, fit_pvals, unfit_group, buffer_group,
                                              user_ids, user_profiles, user_connections, ks_alpha=0.05):
    """ update members in unfit_group with cross-group distance. unfit members are kept in buffer_group
    """
    # keep the API consistent: rebuild the user graph from user_connections
    user_graph = Graph()
    user_graph.add_edges_from(user_connections)

    unfit_group_copy = unfit_group.copy()
    for gg, gg_user_ids in unfit_group_copy.items():
        # extract cross-group distance metrics dictionary to avoid duplicate
        # tests with distance metrics associated with user's group
        other_group_keys = [group_key for group_key in dist_metrics.keys() if group_key != gg]
        cross_group_dist_metrics = {key: dist_metrics[key] for key in other_group_keys}

        for ii, ii_user_id in enumerate(gg_user_ids):
            ii_new_group, ii_new_pval = find_fit_group(ii_user_id, cross_group_dist_metrics,
                                                       user_ids, user_profiles, user_graph, ks_alpha,
                                                       current_group=None, fit_rayleigh=False)
            # redistribute the user based on fit-tests
            if ii_new_group is not None:
                # remove member with fit from buffer_group
                if ii_new_group in fit_group:
                    fit_group[ii_new_group].append(ii_user_id)
                    fit_pvals[ii_new_group].append(ii_new_pval)
                else:
                    fit_group[ii_new_group] = [ii_user_id]
                    fit_pvals[ii_new_group] = [ii_new_pval]
            else:
                buffer_group.append(ii_user_id)

    return fit_group, fit_pvals, buffer_group
def _update_buffer_group(dist_metrics, fit_group, fit_pvals, buffer_group,
                         user_ids, user_profiles, user_connections, ks_alpha=0.05):
    """ return fit_group, fit_pvals, buffer_group
        redistribute member in buffer group into fit_group if fit had been found
    """
    # keep the API consistent: rebuild the user graph from user_connections
    user_graph = Graph()
    user_graph.add_edges_from(user_connections)

    buffer_group_copy = buffer_group.copy()
    if len(buffer_group_copy) > 0:
        for ii, ii_user_id in enumerate(buffer_group_copy):
            ii_new_group, ii_new_pval = find_fit_group(ii_user_id, dist_metrics,
                                                       user_ids, user_profiles, user_graph, ks_alpha,
                                                       current_group=None, fit_rayleigh=False)
            if ii_new_group is not None:
                # remove member with fit from buffer_group
                buffer_group.remove(ii_user_id)
                if ii_new_group in fit_group:
                    fit_group[ii_new_group].append(ii_user_id)
                    fit_pvals[ii_new_group].append(ii_new_pval)
                else:
                    fit_group[ii_new_group] = [ii_user_id]
                    fit_pvals[ii_new_group] = [ii_new_pval]

    return fit_group, fit_pvals, buffer_group
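Both helpers above delegate the actual fit testing to `find_fit_group`, which this listing does not include. A minimal sketch of what such a helper might look like, assuming each entry of `dist_metrics` holds a reference sample of distances for a group and that a two-sample Kolmogorov-Smirnov test decides membership (the name and signature follow the calls above; the body is an assumption):

import numpy as np
from scipy.stats import ks_2samp

def find_fit_group(user_id, dist_metrics, user_ids, user_profiles,
                   user_graph, ks_alpha=0.05, current_group=None,
                   fit_rayleigh=False):
    # hypothetical sketch: the user's sample is the distance from their
    # profile to each neighbor's profile in the connection graph
    idx = user_ids.index(user_id)
    neighbor_idx = [user_ids.index(n) for n in user_graph.neighbors(user_id)]
    profiles = np.asarray(user_profiles)
    user_dists = np.linalg.norm(profiles[neighbor_idx] - profiles[idx], axis=1)

    best_group, best_pval = None, None
    for group, ref_dists in dist_metrics.items():
        # two-sample KS test against the group's reference distances;
        # keep the best group whose p-value clears ks_alpha
        _, pval = ks_2samp(user_dists, ref_dists)
        if pval >= ks_alpha and (best_pval is None or pval > best_pval):
            best_group, best_pval = group, pval
    return best_group, best_pval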
Example #3
 def __init__(self, data=None, name='', file=None, **attr):
     Graph.__init__(self, data=data, name=name, **attr)
     if file is None:
         import sys
         self.fh = sys.stdout
     else:
         self.fh = open(file, 'w')
Example #4
    def __init__(self, environment=None, channelType=None, algorithms=(),
                 networkRouting=True, propagation_type=2, **kwargs):

        Graph.__init__(self)
        self._environment = environment or Environment()
        # assert(isinstance(self.environment, Environment))
        self.channelType = channelType or ChannelType(self._environment)
        if isinstance(self.channelType, Doi):
            doi = kwargs.pop('doi', 0)
            print "In DOI %s" %doi
            self.channelType.set_params(doi=doi)
        self.channelType.environment = self._environment
        self.propagation = propagation.PropagationModel(propagation_type=propagation_type)
        self.pos = {}
        self.ori = {}
        self.labels = {}
        #self.star = star_graph
        self.name = "WSN"
        self._algorithms = ()
        self.algorithms = algorithms or settings.ALGORITHMS
        self.algorithmState = {'index': 0, 'step': 1, 'finished': False}
        self.outbox = []
        self.networkRouting = networkRouting
        self.comm_range = kwargs.pop('commRange', None) or settings.COMM_RANGE
        logger.info("Instance of Network has been initialized with %s (%s)" % (self.propagation, self.comm_range))
Example #5
    def multiple_edges(self, new=None):
        """
        Get/set whether or not self allows multiple edges.

        INPUT:

        - ``new`` -- can be a boolean (in which case it sets the value) or
          ``None``, in which case the current value is returned. It is set to
          ``None`` by default.

        TESTS::

            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.multiple_edges(True)
            sage: G.multiple_edges(None)
            True
        """
        if isinstance(self._nxg, (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)):
            self._nxg = self._nxg.mutate()

        from networkx import Graph, MultiGraph, DiGraph, MultiDiGraph
        if new is None:
            return self._nxg.is_multigraph()
        if new == self._nxg.is_multigraph():
            return
        if new:
            if self._nxg.is_directed():
                self._nxg = MultiDiGraph(self._nxg)
            else:
                self._nxg = MultiGraph(self._nxg)
        else:
            if self._nxg.is_directed():
                self._nxg = DiGraph(self._nxg)
            else:
                self._nxg = Graph(self._nxg)
Example #6
File: network.py  Project: engalex/pymote
    def add_node(self, node=None, pos=None, ori=None, commRange=None):
        """
        Add node to network.

        Attributes:
          `node` -- node to add, default: new node is created
          `pos` -- position (x,y), default: random free position in environment
          `ori` -- orientation from 0 to 2*pi, default: random orientation

        """
        if not node:
            node = Node(commRange=commRange)
        assert isinstance(node, Node)
        if not node.network:
            node.network = self
        else:
            logger.warning('Node is already in another network, can\'t add.')
            return None

        pos = pos if pos is not None else self.find_random_pos(n=100)
        ori = ori if ori is not None else rand() * 2 * pi
        ori = ori % (2 * pi)

        if self._environment.is_space(pos):
            Graph.add_node(self, node)
            self.pos[node] = array(pos)
            self.ori[node] = ori
            self.labels[node] = str(node.id)
            logger.debug('Node %d is placed on position %s.' % (node.id, pos))
            self.recalculate_edges([node])
        else:
            logger.error('Given position is not free space.')
        return node
def extract_colored_faces(fname, colors):
    output = {color:[] for color in colors}
    vertices, faces = load_ply(fname)
    
    for color in colors:
        colored_vertices_indices = np.nonzero((vertices['color'] == color).all(axis=1))[0]
        colored_faces = np.nonzero(np.all((np.in1d(faces["indices"][:,0], colored_vertices_indices),
                                           np.in1d(faces["indices"][:,1], colored_vertices_indices),
                                           np.in1d(faces["indices"][:,2], colored_vertices_indices)), axis=0))[0]

        colored_faces_graph = Graph()
        colored_faces_graph.add_edges_from(faces['indices'][colored_faces][:,:2])
        colored_faces_graph.add_edges_from(faces['indices'][colored_faces][:,1:])
        colored_faces_graph.add_edges_from(faces['indices'][colored_faces][:,(0,2)])
        
        planes_vertices_indices = list(connected_components(colored_faces_graph))
        print(len(planes_vertices_indices))
        for plane_vertices_indices in planes_vertices_indices:
            colored_vertices = vertices["position"][list(plane_vertices_indices)]
            dipdir, dip = calc_sphere(*general_axis(colored_vertices, -1))
            X, Y, Z = colored_vertices.mean(axis=0)
            highest_vertex = colored_vertices[np.argmax(colored_vertices[:, 2]), :]
            lowest_vertex = colored_vertices[np.argmin(colored_vertices[:, 2]), :]
            trace = np.linalg.norm(highest_vertex - lowest_vertex)
            output[color].append((dipdir, dip, X, Y, Z, trace))
    return output
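A hedged usage sketch (the PLY filename and RGB colors are hypothetical; `load_ply`, `calc_sphere`, and `general_axis` come from the surrounding project):

# measure dip direction, dip, centroid and trace length of every
# contiguous red or green face patch (hypothetical inputs)
planes = extract_colored_faces("outcrop.ply", [(255, 0, 0), (0, 255, 0)])
for color, measurements in planes.items():
    for dipdir, dip, x, y, z, trace in measurements:
        print(color, dipdir, dip, trace)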
Example #8
    def multiple_edges(self, new):
        """
        Get/set whether or not self allows multiple edges.
        
        INPUT:
            new: boolean or None
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.multiple_edges(True)
            sage: G.multiple_edges(None)
            True
        """
        if isinstance(self._nxg, (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)):
            self._nxg = self._nxg.mutate()

        from networkx import Graph, MultiGraph, DiGraph, MultiDiGraph
        if new is None:
            return self._nxg.is_multigraph()
        if new == self._nxg.is_multigraph():
            return
        if new:
            if self._nxg.is_directed():
                self._nxg = MultiDiGraph(self._nxg)
            else:
                self._nxg = MultiGraph(self._nxg)
        else:
            if self._nxg.is_directed():
                self._nxg = DiGraph(self._nxg)
            else:
                self._nxg = Graph(self._nxg)
def tuples_to_graph(tuples):
    G = Graph()
    for node, attribute in tuples:
        print('adding', node, attribute)
        G.add_nodes_from(node, freq=attribute)
        G.add_edges_from(to_edges(node))
    return G
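`to_edges` is not defined in this snippet; given that `node` appears to be an iterable of labels, a common reading is a helper that chains consecutive labels into edges. A minimal sketch under that assumption:

def to_edges(nodes):
    # pair consecutive labels: [a, b, c] -> (a, b), (b, c)
    it = iter(nodes)
    last = next(it)
    for current in it:
        yield last, current
        last = current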
Example #10
    def add_layer(self, layer, **attr):
        if self.num_nodes_in_layers == 0:
            self.list_of_layers = [layer]
        else:
            self.list_of_layers.append(layer)

        self.num_layers = len(self.list_of_layers)
        self.num_nodes_in_layers = self.list_of_layers[0].number_of_nodes()

        for i, j in layer.edges():
            self.intra_layer_edges.append((
                i + (len(self.list_of_layers) - 1) * layer.number_of_nodes(),
                j + (len(self.list_of_layers) - 1) * layer.number_of_nodes()))

        try:
            Graph.__init__(self,
                        Graph(disjoint_union_all(self.list_of_layers),
                        **attr))
        except multinetxError:
            raise multinetxError("Multiplex cannot inherit Graph properly")

        # check that all layer graphs have the same number of nodes
        for lg in self.list_of_layers:
            if lg.number_of_nodes() != self.num_nodes_in_layers:
                raise multinetxError("Graph at layer does not have the same number of nodes")
Example #11
def network_UKGDS(filename,header=28):
	"""
	Load Excel file with UKGDS data format and build dict array of bus coordinates
	and graph structure suitable for plotting with the networkx module.
	"""
	from numpy import array,where
	from pandas import ExcelFile
	from networkx import Graph

	data = ExcelFile(filename)
	bus = data.parse("Buses",header=header)
	branch = data.parse("Branches",header=header)
	pos = {}
	for node in range(len(bus["BNU"])):
		pos.update({node:array([bus["BXC"][node],bus["BYC"][node]])})
	net = []
	for k in range(len(branch["CFB"])):
		von = where(bus["BNU"]==branch["CFB"][k])[0][0]
		zu  = where(bus["BNU"]==branch["CTB"][k])[0][0]
		net.append([von,zu])
	nodes = set([n1 for n1,n2 in net] + [n2 for n1,n2 in net])
	G = Graph()
	for node in nodes:
		G.add_node(node)
	for edge in net:
		G.add_edge(edge[0],edge[1])
	return G,pos
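A hedged usage sketch (the workbook name is hypothetical; the sheet layout must match the UKGDS export the function expects):

from networkx import draw
G, pos = network_UKGDS("ukgds_ehv1.xls")
draw(G, pos, node_size=30)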
Example #12
def compute_molecule(universe):
    '''
    Cluster atoms into molecules.

    The algorithm is to create a network graph containing every atom (in every
    frame) as nodes, with bonds as edges. Using this connectivity information,
    one can perform a (breadth first) traversal of the network graph to cluster
    all nodes (whose indices correspond to physical atoms).

    Args:
        universe (:class:`~exatomic.universe.Universe`): Atomic universe

    Returns:
        objs (tuple): Molecule indices (for atom dataframe(s)) and molecule dataframe

    Warning:
        This function will modify (in place) a few tables of the universe!
    '''
    if 'bond_count' not in universe.atom:    # The bond count is used to find single atoms;
        universe.compute_bond_count()        # single atoms are treated as molecules.
    b0 = None
    b1 = None
    bonded = universe.two[universe.two['bond'] == True]
    if universe.is_periodic:
        mapper = universe.projected_atom['atom']
        b0 = bonded['prjd_atom0'].map(mapper)
        b1 = bonded['prjd_atom1'].map(mapper)
    else:
        b0 = bonded['atom0']
        b1 = bonded['atom1']
    graph = Graph()
    graph.add_edges_from(zip(b0.values, b1.values))
    mapper = {}
    for i, molecule in enumerate(connected_components(graph)):
        for atom in molecule:
            mapper[atom] = i
    n = 1
    if len(mapper.values()) > 0:
        n += max(mapper.values())
    else:
        n -= 1
    idxs = universe.atom[universe.atom['bond_count'] == 0].index
    for i, index in enumerate(idxs):
        mapper[index] = i + n
    # Set the molecule indices
    universe.atom['molecule'] = universe.atom.index.map(lambda idx: mapper[idx])
    # Now compute molecule table
    universe.atom['mass'] = universe.atom['symbol'].map(symbol_to_element_mass)
    # The coordinates of visual_atom represent grouped molecules for
    # periodic calculations and absolute coordinates for free boundary conditions.
    molecules = universe.atom.groupby('molecule')
    molecule = molecules['symbol'].value_counts().unstack().fillna(0).astype(np.int64)
    molecule.columns.name = None
    molecule['frame'] = universe.atom.drop_duplicates('molecule').set_index('molecule')['frame']
    molecule['mass'] = molecules['mass'].sum()
    del universe.atom['mass']
    frame = universe.atom[['molecule', 'frame']].drop_duplicates('molecule')
    frame = frame.set_index('molecule')['frame'].astype(np.int64)
    molecule['frame'] = frame.astype('category')
    return Molecule(molecule)
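The clustering step in `compute_molecule` is plain connected components over the bond graph; this standalone sketch shows the core idea on made-up atom indices:

from networkx import Graph, connected_components

# bonds between atom indices (made-up data): two molecules, {0, 1, 2} and {3, 4}
graph = Graph()
graph.add_edges_from([(0, 1), (1, 2), (3, 4)])

molecule_of = {}
for i, molecule in enumerate(connected_components(graph)):
    for atom in molecule:
        molecule_of[atom] = i
# molecule_of -> {0: 0, 1: 0, 2: 0, 3: 1, 4: 1}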
Example #13
def summarize(text, sentence_count=5, language='english'):
    processor = LanguageProcessor(language)

    sentence_list = processor.split_sentences(text)
    wordset_list = map(processor.extract_significant_words, sentence_list)
    stemsets = [
        {processor.stem(word) for word in wordset}
        for wordset in wordset_list
    ]

    graph = Graph()
    pairs = combinations(enumerate(stemsets), 2)
    for (index_a, stems_a), (index_b, stems_b) in pairs:
        if stems_a and stems_b:
            similarity = 1 - jaccard(stems_a, stems_b)
            if similarity > 0:
                graph.add_edge(index_a, index_b, weight=similarity)

    ranked_sentence_indexes = list(pagerank(graph).items())
    if ranked_sentence_indexes:
        sentences_by_rank = sorted(
            ranked_sentence_indexes, key=itemgetter(1), reverse=True)
        best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
        best_sentences_in_order = sorted(best_sentences)
    else:
        best_sentences_in_order = range(min(sentence_count, len(sentence_list)))

    return ' '.join(sentence_list[index] for index in best_sentences_in_order)
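A hedged usage sketch (the input file is hypothetical; `LanguageProcessor` must have its language resources installed):

with open("article.txt") as f:   # hypothetical input
    print(summarize(f.read(), sentence_count=3))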
Example #14
def draw_network(bus,branch,bus_names=None,coordinates=None,ax=None):
	"""Generate networkx Graph object and draw network diagram
	It is assumed that bus and branch ordering corresponds to PyPower format
	"""
	from networkx import Graph,draw
	from matplotlib.pyplot import show
	if isinstance(coordinates,np.ndarray):
		pos = {}
		if coordinates.shape[0]==2:
			coordinates = coordinates.T
		for node in range(len(bus)):
			pos.update({node:coordinates[node,:]})
	else:
		pos = None
	net = []
	for k in range(len(branch)):
		von = np.where(bus[:,BUS_I]==branch[k,F_BUS])[0][0]
		zu  = np.where(bus[:,BUS_I]==branch[k,T_BUS])[0][0]
		net.append([von,zu])
	nodes = set([n1 for n1,n2 in net] + [n2 for n1,n2 in net])
	G = Graph()
	for node in nodes:
		G.add_node(node)
	for edge in net:
		G.add_edge(edge[0],edge[1])
	draw(G,pos,ax=ax)
	show()
Example #15
def convert_local_tree_topology_to_graph(loc_tree_topo, tree_node_labeling):
    """ Creates a directed, acyclic NetworkX graph from a local tree topology

    Parameters
    ----------
    loc_tree_topo: array-like
        The local tree toplogy, where the root node element is -1

    tree_node_labeling: array-like
        The integer ids for each tree node

    Returns
    -------
    G : NetworkX graph

    """

    assert loc_tree_topo[0] == -1

    G = Graph()
    G.add_nodes_from( tree_node_labeling )
    # build up graph connectivity
    con = vstack( (loc_tree_topo, range(len(loc_tree_topo))) )
    # prune root node connectivity
    con = con[:,1:]
    # update with correct labels
    con = tree_node_labeling[con]
    G.add_edges_from( zip(con[0,:], con[1,:]) )

    return G
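A small worked example of the index arithmetic above (topology and labels are made-up): `con` stacks parents over child indices, the root column is pruned, and relabeling yields the edges in the comment.

import numpy as np

loc_tree_topo = np.array([-1, 0, 0, 1])        # node 0 is the root
tree_node_labeling = np.array([10, 11, 12, 13])
G = convert_local_tree_topology_to_graph(loc_tree_topo, tree_node_labeling)
sorted(G.edges())   # [(10, 11), (10, 12), (11, 13)]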
Example #16
def edmondskarp(G: nx.Graph, s, t):

    RG = G.copy()

    for u, v in G.edges():
        G[u][v]['flow'] = 0

    path = isthereapath(RG, s, t)
    while len(path) > 0:
        path_cp = min([RG[u][v]['capacity'] for u, v in path])
        for u, v in path:
            if G.has_edge(u, v):
                G[u][v]['flow'] += path_cp
                RG[u][v]['capacity'] -= path_cp
                if RG[u][v]['capacity'] == 0:
                    RG.remove_edge(u, v)

                if RG.has_edge(v, u):
                    RG[v][u]['capacity'] += path_cp
                else:
                    RG.add_edge(v, u, capacity=path_cp)
            else:
                # then this edge is a "cancelling" flow:
                # residue should go up and cancelling "capacity" should go down
                G[v][u]['flow'] -= path_cp
                RG[v][u]['capacity'] += path_cp
                RG[u][v]['capacity'] -= path_cp
                if RG[u][v]['capacity'] == 0:
                    RG.remove_edge(u, v)
        path = isthereapath(RG, s, t)

    return RG
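`isthereapath` is assumed by the snippet but not shown. A minimal BFS sketch consistent with how it is used, returning the list of `(u, v)` edges along one augmenting `s`-`t` path in the residual graph, or an empty list when `t` is unreachable:

from collections import deque

def isthereapath(RG, s, t):
    # BFS from s; parents lets us rebuild the edge list once t is reached
    parents = {s: None}
    q = deque([s])
    while q:
        u = q.popleft()
        if u == t:
            path = []
            while parents[u] is not None:
                path.append((parents[u], u))
                u = parents[u]
            return path[::-1]
        for v in RG[u]:
            if v not in parents:
                parents[v] = u
                q.append(v)
    return []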
Example #17
def make_graph(points, neighbor_max_dist=0.01):
    graph = Graph()
    graph.add_nodes_from(range(len(points)))
    for i in range(len(points)):
        for j in range(i + 1, len(points)):
            if euclidian_3d_dist(points[i], points[j])<neighbor_max_dist:
                graph.add_edge(i,j)
    return graph
Example #18
def build_graph(ways):
    graph = Graph()
    for way, tags in ways:
        for segment in nwise(way.coords):
            weight = length(segment) * coef(tags)
            graph.add_edge(segment[0], segment[1],
                           weight=weight)
    return graph
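`nwise`, `length`, and `coef` come from the surrounding project; `nwise` is presumably a sliding window over each way's coordinates. A minimal sketch under that assumption:

from itertools import tee

def nwise(iterable, n=2):
    # sliding window: [p0, p1, p2] -> (p0, p1), (p1, p2)
    iters = tee(iterable, n)
    for i, it in enumerate(iters):
        for _ in range(i):
            next(it, None)
    return zip(*iters)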
Example #19
def test_exceptions():
    test = Graph()
    test.add_node('a')
    assert_raises(NetworkXError, asyn_fluidc, test, 'hi')
    assert_raises(NetworkXError, asyn_fluidc, test, -1)
    assert_raises(NetworkXError, asyn_fluidc, test, 3)
    test.add_node('b')
    assert_raises(NetworkXError, asyn_fluidc, test, 1)
Example #20
def find_rings(atoms):
    graph = Graph()
    for i, atom1 in enumerate(atoms):
        for atom2 in atoms[i+1:]:
            if is_bound(atom1.cart, atom1.element, atom2.cart, atom2.element):
                graph.add_edge(atom1.name, atom2.name)
    ring_list = cycle_basis(graph)
    return ring_list
def build_graph(edge_weight):
    """
    建图,无向
    返回一个list,list中每个元素为一个图
    """
    from networkx import Graph
    graph = Graph()
    graph.add_weighted_edges_from(edge_weight)
    return graph
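Usage is a one-liner; `add_weighted_edges_from` expects `(u, v, weight)` triples:

graph = build_graph([("a", "b", 0.5), ("b", "c", 1.2), ("a", "c", 2.0)])
graph["a"]["b"]["weight"]   # 0.5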
Example #22
def read_mc():
	"""
	Parses the Miller and Charles word similarity test collection.
	"""
	G=Graph()
	filename = get_support_data_filename('mc/EN-MC-30.txt')
	with open(filename) as file:
		for line in file:
			parts = line.split()
			G.add_edge(parts[0], parts[1], weight=float(parts[2]))
	return G
Example #23
def read_graphml(graph_file):
    from BeautifulSoup import BeautifulSoup as Soup
    soup = Soup(graph_file.read())
    graph = Graph()

    for edge in soup.findAll("edge"):
        source = int(edge['source'])
        target = int(edge['target'])
        graph.add_edge(source, target)
    return graph
Example #24
File: network.py  Project: engalex/pymote
 def remove_node(self, node):
     """ Remove node from network. """
     if node not in self.nodes():
         logger.error("Node not in network")
         return
     Graph.remove_node(self, node)
     del self.pos[node]
     del self.labels[node]
     node.network = None
     logger.debug('Node with id %d is removed.' % node.id)
Example #25
def read_rw():
	"""
	Parses the rare word similarity test collection.
	"""
	G = Graph()
	filename = get_support_data_filename('rw/rw.txt')
	with open(filename) as file:
		for line in file:
			parts = line.split()
			G.add_edge(parts[0], parts[1], weight=(float(parts[2])/10))
	return G
Example #26
def make_colors(graph: nx.Graph) -> map:
    names = graph.nodes()
    longest = max(names)
    raw = [levenshtein_distance(x, longest) for x in names]
    largest_raw = max(raw)
    degrees = [graph.degree(x) for x in graph]
    largest_degrees = max(degrees)
    return map(lambda x, y: x + y,
               [int(10 * x/largest_degrees) for x in degrees],
               [10 * x/largest_raw for x in raw])
Example #27
def solve():
	from networkx import Graph, max_flow
	W, H, B = map(int,input().split())
	R = [[True for _ in range(H)] for _ in range(W)]
	
	for _ in range(B):
		x0, y0, x1, y1 = map(int,input().split())
		for x in range(x0,x1+1):
			for y in range(y0,y1+1):
				R[x][y] = False
	
	G = Graph()
	
	for x in range(W):
		for y in range(H):
			if R[x][y]:
				G.add_edge(('start',x,y),('end',x,y),capacity=1)
			
				if y == 0:
					G.add_edge('source',('start',x,y))
				else:
					if R[x][y-1]:
						#G.add_edge(('end',x,y),('start',x,y-1))
						pass
				
				if y == H-1:
					G.add_edge(('end',x,y),'sink')
				else:
					if R[x][y+1]:
						G.add_edge(('end',x,y),('start',x,y+1))
	
	return max_flow(G,'source','sink','capacity')
Example #28
def test_two_nodes():
    test = Graph()

    test.add_edge('a', 'b')

    # ground truth
    ground_truth = set([frozenset(['a']), frozenset(['b'])])

    communities = asyn_fluidc(test, 2)
    result = {frozenset(c) for c in communities}
    assert_equal(result, ground_truth)
Example #29
def test_single_node():
    test = Graph()

    test.add_node('a')

    # ground truth
    ground_truth = set([frozenset(['a'])])

    communities = asyn_fluidc(test, 1)
    result = {frozenset(c) for c in communities}
    assert_equal(result, ground_truth)
Example #30
def read_gml(graph_file):
    graph = Graph()

    data = read_gml_data(graph_file)
    for n in data['graph']['node']:
        graph.add_node(int(n['id']))

    for e in data['graph']['edge']:
        graph.add_edge(int(e['source']), int(e['target']))

    return graph
Example #31
"""
This is an example derivation.
If you want to test something you can use it.
It's better to copy-paste this file as `test.py` in order
not to accidentally commit this file.
"""
from matplotlib import pyplot
from networkx import Graph

from agh_graphs.productions.p1 import P1
from agh_graphs.productions.p2 import P2
from agh_graphs.utils import gen_name
from agh_graphs.visualize import visualize_graph_layer, visualize_graph_3d

if __name__ == '__main__':
    graph = Graph()
    initial_node_name = gen_name()
    graph.add_node(initial_node_name, layer=0, position=(0.5, 0.5), label='E')

    [i1, i2] = P1().apply(graph, [initial_node_name])
    [i1_1, i1_2] = P2().apply(graph, [i1])
    [i2_1, i2_2] = P2().apply(graph, [i2])
    [i3_1, i3_2] = P2().apply(graph, [i1_1])

    visualize_graph_3d(graph)
    pyplot.show()

    visualize_graph_layer(graph, 2)
    pyplot.show()
Example #32
    def add_to_graph(self, graph: nx.Graph):
        # Add this room to the graph
        graph.add_node(self.id,
                       type=int(self.entity_type),
                       value=int(self.type))

        for door in self.door_map.values():
            if door and door.type != DoorType.BARRICADE:
                graph.add_edge(self.id,
                               door.connection.id,
                               type=int(door.type))

        if self.treasure is not None:
            graph.add_node(self.treasure.id,
                           type=int(self.treasure.entity_type),
                           value=int(self.treasure.value))
            graph.add_edge(self.id, self.treasure.id)

        for enemy in self.enemies:
            graph.add_node(enemy.id,
                           type=int(enemy.entity_type),
                           value=int(enemy.enemy_type))
            graph.add_edge(self.id, enemy.id)
Example #33
def modularity_components(
    graph: nx.Graph,
    partitions: Dict[Any, int],
    weight_attribute: str = "weight",
    resolution: float = 1.0,
) -> Dict[int, float]:
    """
    Given an undirected, weighted graph and a community partition dictionary,
    calculates a modularity quantum for each community ID. The sum of these quanta
    is the modularity of the graph and partitions provided.

    Parameters
    ----------
    graph : nx.Graph
        An undirected graph
    partitions : Dict[Any, int]
        A dictionary representing a community partitioning scheme with the keys being
        the vertex and the value being a community id.
    weight_attribute : str
        The edge data attribute on the graph that contains a float weight for the edge.
    resolution : float
        The resolution to use when calculating the modularity.

    Returns
    -------
    Dict[int, float]
        A dictionary of the community id to the modularity component of that community

    Raises
    ------
    TypeError
        If ``graph`` is not a networkx Graph or
        If ``partitions`` is not a dictionary or
        If ``resolution`` is not a float
    ValueError
        If ``graph`` is unweighted
        If ``graph`` is directed
        If ``graph`` is a multigraph
    """
    _assertions(graph, partitions, weight_attribute, resolution)

    total_edge_weight = 0.0

    communities = set(partitions.values())

    degree_sums_within_community: Dict[int, float] = defaultdict(lambda: 0.0)
    degree_sums_for_community: Dict[int, float] = defaultdict(lambda: 0.0)
    for vertex, neighbor_vertex, weight in graph.edges(data=weight_attribute):
        vertex_community = partitions[vertex]
        neighbor_community = partitions[neighbor_vertex]
        if vertex_community == neighbor_community:
            if vertex == neighbor_vertex:
                degree_sums_within_community[vertex_community] += weight
            else:
                degree_sums_within_community[vertex_community] += weight * 2.0
        degree_sums_for_community[vertex_community] += weight
        degree_sums_for_community[neighbor_community] += weight
        total_edge_weight += weight

    return {
        comm: _modularity_component(
            degree_sums_within_community[comm],
            degree_sums_for_community[comm],
            total_edge_weight,
            resolution,
        )
        for comm in communities
    }
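A hedged usage sketch (hypothetical weights and partition); the sum of the returned quanta is the usual modularity of the partition:

import networkx as nx

g = nx.Graph()
g.add_edge("a", "b", weight=1.0)
g.add_edge("b", "c", weight=1.0)
g.add_edge("a", "c", weight=1.0)
g.add_edge("c", "d", weight=0.5)
partitions = {"a": 0, "b": 0, "c": 0, "d": 1}
components = modularity_components(g, partitions)
modularity = sum(components.values())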
Example #34
    def merge(self, readset):
        """
        Return a set of reads after merging together subsets of reads
        (into super reads) from an input readset according to a
        probabilistic model of how likely sets of reads are to appear
        together on one haplotype and on opposite haplotypes.
        readset -- the input .core.ReadSet object
        error_rate -- the probability that a nucleotide is wrong
        max_error_rate -- the maximum error rate of any edge of the read
        merging graph allowed before we discard it
        threshold -- the threshold of the ratio between the probabilities
        that a pair of reads come from the same haplotype and different
        haplotypes
        neg_threshold -- The threshold of the ratio between the
        probabilities that a pair of reads come from the same haplotype
        and different haplotypes.
        """
        logger.info(
            "Merging %d reads with error rate %.2f, maximum error rate %.2f, "
            "positive threshold %d and negative threshold %d ...",
            len(readset),
            self._error_rate,
            self._max_error_rate,
            self._positive_threshold,
            self._negative_threshold,
        )
        logger.debug("Merging started.")
        gblue = Graph()
        gred = Graph()
        gnotblue = Graph()
        gnotred = Graph()

        # Probability that any nucleotide is wrong
        error_rate = self._error_rate
        logger.debug("Error Rate: %s", error_rate)

        # If an edge has too many errors, we discard it since it is not reliable
        max_error_rate = self._max_error_rate
        logger.debug("Max Error Rate: %s", max_error_rate)

        # Threshold of the ratio between the probabilities that the two reads come from
        # the same side or from different sides
        thr = self._positive_threshold
        logger.debug("Positive Threshold: %s", thr)

        # Threshold_neg is a more conservative threshold for the evidence
        # that two reads should not be clustered together.
        thr_neg = self._negative_threshold
        logger.debug("Negative Threshold: %s", thr_neg)

        thr_diff = 1 + int(log(thr, (1 - error_rate) / (error_rate / 3)))
        thr_neg_diff = 1 + int(
            log(thr_neg, (1 - error_rate) / (error_rate / 3)))
        logger.debug("Thr. Diff.: %s - Thr. Neg. Diff.: %s", thr_diff,
                     thr_neg_diff)

        logger.debug("Start reading the reads...")
        id = 0
        orig_reads = {}
        queue = {}
        reads = {}
        for read in readset:
            id += 1
            begin_str = read[0][0]
            snps = []
            orgn = []
            for variant in read:

                site = variant[0]
                zyg = variant[1]
                qual = variant[2]

                orgn.append([str(site), str(zyg), str(qual)])
                if int(zyg) == 0:
                    snps.append("G")
                else:
                    snps.append("C")

            begin = int(begin_str)
            end = begin + len(snps)
            orig_reads[id] = orgn

            gblue.add_node(id, begin=begin, end=end, sites="".join(snps))
            gnotblue.add_node(id, begin=begin, end=end, sites="".join(snps))
            gred.add_node(id, begin=begin, end=end, sites="".join(snps))
            gnotred.add_node(id, begin=begin, end=end, sites="".join(snps))
            queue[id] = {"begin": begin, "end": end, "sites": snps}
            reads[id] = {"begin": begin, "end": end, "sites": snps}
            for x in [id for id in queue.keys() if queue[id]["end"] <= begin]:
                del queue[x]
            for id1 in queue.keys():
                if id == id1:
                    continue
                match, mismatch = eval_overlap(queue[id1], queue[id])
                if (match + mismatch >= thr_neg_diff and min(match, mismatch) /
                    (match + mismatch) <= max_error_rate
                        and match - mismatch >= thr_diff):
                    gblue.add_edge(id1, id, match=match, mismatch=mismatch)
                    if mismatch - match >= thr_diff:
                        gred.add_edge(id1, id, match=match, mismatch=mismatch)
                    if match - mismatch >= thr_neg_diff:
                        gnotred.add_edge(id1,
                                         id,
                                         match=match,
                                         mismatch=mismatch)
                    if mismatch - match >= thr_neg_diff:
                        gnotblue.add_edge(id1,
                                          id,
                                          match=match,
                                          mismatch=mismatch)

        logger.debug("Finished reading the reads.")
        logger.debug("Number of reads: %s", id)
        logger.debug("Blue Graph")
        logger.debug(
            "Nodes: %s - Edges: %s - ConnComp: %s",
            number_of_nodes(gblue),
            number_of_edges(gblue),
            len(list(connected_components(gblue))),
        )
        logger.debug("Non-Blue Graph")
        logger.debug(
            "Nodes: %s - Edges: %s - ConnComp: %s",
            number_of_nodes(gnotblue),
            number_of_edges(gnotblue),
            len(list(connected_components(gnotblue))),
        )
        logger.debug("Red Graph")
        logger.debug(
            "Nodes: %s - Edges: %s - ConnComp: %s",
            number_of_nodes(gred),
            number_of_edges(gred),
            len(list(connected_components(gred))),
        )
        logger.debug("Non-Red Graph")
        logger.debug(
            "Nodes: %s - Edges: %s - ConnComp: %s",
            number_of_nodes(gnotred),
            number_of_edges(gnotred),
            len(list(connected_components(gnotred))),
        )

        # We consider the notblue edges as an evidence that two reads
        # should not be merged together
        # Since we want to merge each blue connected components into
        # a single superread, we check each notblue edge (r1, r2) and
        # we remove some blue edges so that r1 and r2 are not in the
        # same blue connected component

        blue_component = {}
        current_component = 0
        for conncomp in connected_components(gblue):
            for v in conncomp:
                blue_component[v] = current_component
            current_component += 1

        # Keep only the notblue edges that are inside a blue connected component
        good_notblue_edges = [(v, w) for (v, w) in gnotblue.edges()
                              if blue_component[v] == blue_component[w]]

        for (u, v) in good_notblue_edges:
            while v in node_connected_component(gblue, u):
                path = shortest_path(gblue, source=u, target=v)
                # Remove the edge with the smallest support
                # A better strategy is to weight each edge with -log p
                # and remove the minimum (u,v)-cut
                w, x = min(
                    zip(path[:-1], path[1:]),
                    key=lambda p: gblue[p[0]][p[1]]["match"] - gblue[p[0]][p[
                        1]]["mismatch"],
                )
                gblue.remove_edge(w, x)

        # Merge blue components (somehow)
        logger.debug("Started Merging Reads...")
        superreads = {}  # superreads given by the clusters (if clustering)
        rep = {}  # cluster representative of a read in a cluster

        for cc in connected_components(gblue):
            if len(cc) > 1:
                r = min(cc)
                superreads[r] = {}
                for id in cc:
                    rep[id] = r

        for id in orig_reads:
            if id in rep:
                for tok in orig_reads[id]:
                    site = int(tok[0])
                    zyg = int(tok[1])
                    qual = int(tok[2])
                    r = rep[id]
                    if site not in superreads[r]:
                        superreads[r][site] = [0, 0]
                    superreads[r][site][zyg] += qual

        merged_reads = ReadSet()
        readn = 0
        for id in orig_reads:
            read = Read("read" + str(readn))
            readn += 1
            if id in rep:
                if id == rep[id]:
                    for site in sorted(superreads[id]):
                        z = superreads[id][site]
                        if z[0] >= z[1]:
                            read.add_variant(site, 0, z[0] - z[1])

                        elif z[1] > z[0]:
                            read.add_variant(site, 1, z[1] - z[0])
                    merged_reads.add(read)
            else:
                for tok in orig_reads[id]:
                    read.add_variant(int(tok[0]), int(tok[1]), int(tok[2]))
                merged_reads.add(read)

        logger.debug("Finished merging reads.")
        logger.info(
            "... after merging: merged %d reads into %d reads",
            len(readset),
            len(merged_reads),
        )

        return merged_reads
Example #35
def loss_hamiltonian(graph: nx.Graph) -> Hamiltonian:
    r"""Calculates the loss Hamiltonian for the maximum-weighted cycle problem.

    We consider the problem of selecting a cycle from a graph that has the greatest product of edge
    weights, as outlined `here <https://1qbit.com/whitepaper/arbitrage/>`__. The product of weights
    of a subset of edges in a graph is given by

    .. math:: P = \prod_{(i, j) \in E} [(c_{ij} - 1)x_{ij} + 1]

    where :math:`E` are the edges of the graph, :math:`x_{ij}` is a binary number that selects
    whether to include the edge :math:`(i, j)` and :math:`c_{ij}` is the corresponding edge weight.
    Our objective is to maximize :math:`P`, subject to selecting the :math:`x_{ij}` so that
    our subset of edges composes a cycle.

    The product of edge weights is maximized by equivalently considering

    .. math:: \sum_{(i, j) \in E} x_{ij}\log c_{ij},

    assuming :math:`c_{ij} > 0`.

    This can be restated as a minimization of the expectation value of the following qubit
    Hamiltonian:

    .. math::

        H = \sum_{(i, j) \in E} Z_{ij}\log c_{ij}.

    where :math:`Z_{ij}` is a qubit Pauli-Z matrix acting upon the wire specified by the edge
    :math:`(i, j)`. Mapping from edges to wires can be achieved using :func:`~.edges_to_wires`.

    .. note::
        The expectation value of the returned Hamiltonian :math:`H` is not equal to :math:`P`, but
        minimizing the expectation value of :math:`H` is equivalent to maximizing :math:`P`.

        Also note that the returned Hamiltonian does not impose that the selected set of edges is
        a cycle. This constraint can be enforced using a penalty term or by selecting a QAOA
        mixer Hamiltonian that only transitions between states that correspond to cycles.

    **Example**

    >>> import networkx as nx
    >>> g = nx.complete_graph(3).to_directed()
    >>> edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(g.edges)}
    >>> for k, v in edge_weight_data.items():
    ...     g[k[0]][k[1]]["weight"] = v
    >>> h = loss_hamiltonian(g)
    >>> print(h)
      (-0.6931471805599453) [Z0]
    + (0.0) [Z1]
    + (0.4054651081081644) [Z2]
    + (0.6931471805599453) [Z3]
    + (0.9162907318741551) [Z4]
    + (1.0986122886681098) [Z5]

    Args:
        graph (nx.Graph): the graph specifying possible edges

    Returns:
        qml.Hamiltonian: the loss Hamiltonian

    Raises:
        ValueError: if the graph contains self-loops
        KeyError: if one or more edges do not contain weight data
    """
    edges_to_qubits = edges_to_wires(graph)
    coeffs = []
    ops = []

    edges_data = graph.edges(data=True)

    for edge_data in edges_data:
        edge = edge_data[:2]

        if edge[0] == edge[1]:
            raise ValueError("Graph contains self-loops")

        try:
            weight = edge_data[2]["weight"]
        except KeyError as e:
            raise KeyError(f"Edge {edge} does not contain weight data") from e

        coeffs.append(np.log(weight))
        ops.append(qml.PauliZ(wires=edges_to_qubits[edge]))

    return Hamiltonian(coeffs, ops)
Example #36
class Gratoms(Atoms):
    """Graph based atoms object.

    An Integrated class for an ASE atoms object with a corresponding
    Networkx Graph.
    """

    def __init__(self,
                 symbols=None,
                 positions=None,
                 numbers=None,
                 tags=None,
                 momenta=None,
                 masses=None,
                 magmoms=None,
                 charges=None,
                 scaled_positions=None,
                 cell=None,
                 pbc=None,
                 celldisp=None,
                 constraint=None,
                 calculator=None,
                 info=None,
                 edges=None):
        super().__init__(symbols, positions, numbers, tags, momenta, masses,
                         magmoms, charges, scaled_positions, cell, pbc,
                         celldisp, constraint, calculator, info)

        if self.pbc.any():
            self._graph = MultiGraph()
        else:
            self._graph = Graph()

        nodes = [[i, {
            'number': n
        }] for i, n in enumerate(self.arrays['numbers'])]
        self._graph.add_nodes_from(nodes)

        if edges:
            self._graph.add_edges_from(edges, bonds=1)

        self._surface_atoms = None

    @property
    def graph(self):
        return self._graph

    @property
    def nodes(self):
        return self._graph.nodes

    @property
    def edges(self):
        return self._graph.edges

    @property
    def adj(self):
        return self._graph.adj

    def get_surface_atoms(self):
        """Return surface atoms."""
        return self._surface_atoms

    def set_surface_atoms(self, surface_atoms):
        """Assign surface atoms."""
        self._surface_atoms = surface_atoms

    def get_neighbor_symbols(self, u):
        """Get chemical symbols for neighboring atoms of u."""
        neighbors = list(self._graph[u])

        return sym[self.arrays['numbers'][neighbors]]

    def is_isomorph(self, other):
        """Check if isomorphic by bond count and atomic number."""
        isomorphic = nx.is_isomorphic(
            self._graph, other._graph, edge_match=em, node_match=nm)

        return isomorphic

    def get_chemical_tags(self, rank=2):
        """Generate a hash descriptive of the chemical formula (rank 0)
        or include bonding (rank 1).
        """
        cnt = np.bincount(self.arrays['numbers'])
        composition = ','.join(cnt.astype(str))

        if rank == 1:
            return composition[2:]

        for adj in self.adj.items():

            num = self.arrays['numbers'][list(adj[1].keys())]
            cnt += np.bincount(num, minlength=len(cnt))

        bonding = ','.join(cnt.astype(str))

        return composition[2:], bonding[2:]

    def get_unsaturated_nodes(self, screen=None):

        unsaturated = []
        for node, data in self.nodes(data=True):
            radicals = data['valence']

            if screen in data:
                continue

            if radicals > 0:
                unsaturated += [node]

        return np.array(unsaturated)

    def copy(self):
        """Return a copy."""
        atoms = self.__class__(cell=self._cell, pbc=self._pbc, info=self.info)

        atoms.arrays = {}
        for name, a in self.arrays.items():
            atoms.arrays[name] = a.copy()
        atoms.constraints = copy.deepcopy(self.constraints)
        atoms._graph = self.graph.copy()

        return atoms

    def __iadd__(self, other):
        """Extend atoms object by appending atoms from *other*."""
        if isinstance(other, Atom):
            other = self.__class__([other])

        n1 = len(self)
        n2 = len(other)

        for name, a1 in self.arrays.items():
            a = np.zeros((n1 + n2, ) + a1.shape[1:], a1.dtype)
            a[:n1] = a1
            if name == 'masses':
                a2 = other.get_masses()
            else:
                a2 = other.arrays.get(name)
            if a2 is not None:
                a[n1:] = a2
            self.arrays[name] = a

        for name, a2 in other.arrays.items():
            if name in self.arrays:
                continue
            a = np.empty((n1 + n2, ) + a2.shape[1:], a2.dtype)
            a[n1:] = a2
            if name == 'masses':
                a[:n1] = self.get_masses()[:n1]
            else:
                a[:n1] = 0

            self.set_array(name, a)

        if isinstance(other, Gratoms):
            if isinstance(self._graph, nx.MultiGraph) & \
               isinstance(other._graph, nx.Graph):
                other._graph = nx.MultiGraph(other._graph)

            self._graph = nx.disjoint_union(self._graph, other._graph)

        return self

    def __delitem__(self, i):
        from ase.constraints import FixAtoms
        for c in self._constraints:
            if not isinstance(c, FixAtoms):
                raise RuntimeError('Remove constraint using set_constraint() '
                                   'before deleting atoms.')

        if isinstance(i, (list, int)):
            # Make sure a list of booleans will work correctly and not be
            # interpreted as 0 and 1 indices.
            i = np.atleast_1d(i)

        if len(self._constraints) > 0:
            n = len(self)
            i = np.arange(n)[i]
            if isinstance(i, int):
                i = [i]
            constraints = []
            for c in self._constraints:
                c = c.delete_atoms(i, n)
                if c is not None:
                    constraints.append(c)
            self.constraints = constraints

        mask = np.ones(len(self), bool)
        mask[i] = False

        for name, a in self.arrays.items():
            self.arrays[name] = a[mask]

        if isinstance(i, slice):
            i = np.arange(n)[i]

        self._graph.remove_nodes_from(i)
        mapping = dict(zip(np.where(mask)[0], np.arange(len(self))))
        nx.relabel_nodes(self._graph, mapping, copy=False)

    def __imul__(self, m):
        """In-place repeat of atoms."""
        if isinstance(m, int):
            m = (m, m, m)

        for x, vec in zip(m, self._cell):
            if x != 1 and not vec.any():
                raise ValueError(
                    'Cannot repeat along undefined lattice vector')

        if self.pbc.any() and len(self.edges()) > 0:
            raise ValueError("Edge conservation not currently supported with "
                             "pbc. Remove pbc or edges first.")

        M = np.prod(m)
        n = len(self)

        for name, a in self.arrays.items():
            self.arrays[name] = np.tile(a, (M, ) + (1, ) * (len(a.shape) - 1))

        cgraph = self._graph.copy()

        positions = self.arrays['positions']
        i0 = 0

        for m0 in range(m[0]):
            for m1 in range(m[1]):
                for m2 in range(m[2]):
                    i1 = i0 + n
                    positions[i0:i1] += np.dot((m0, m1, m2), self._cell)
                    i0 = i1
                    if m0 + m1 + m2 != 0:
                        self._graph = nx.disjoint_union(self._graph, cgraph)

        if self.constraints is not None:
            self.constraints = [c.repeat(m, n) for c in self.constraints]

        self._cell = np.array([m[c] * self._cell[c] for c in range(3)])

        return self
Example #37
File: map.py  Project: esirK/mediacloud
def run_fa2_layout(graph: nx.Graph, memory_limit_mb: int) -> None:
    """Generate force atlas 2 layout for the graph.

    Run an external java library on the graph to assign a position to each node.

    Assign a 'position' attribute to each node in the graph that is a [x, y] tuple.
    """

    with tempfile.TemporaryDirectory('topic_map') as temp_dir:

        input_file = os.path.join(temp_dir, 'input.gexf')
        output_template = os.path.join(temp_dir, 'output')
        output_file = output_template + ".txt"

        export_graph = graph.copy()
        for node in export_graph.nodes(data=True):
            for key in list(node[1].keys()):
                del node[1][key]

        nx.write_gexf(export_graph, input_file)

        log.info("running layout...")

        output = subprocess.check_output(
            [
                "java",
                "-Djava.awt.headless=true",
                f"-Xmx{memory_limit_mb}m",
                "-cp",
                "/opt/fa2l/forceatlas2.jar:/opt/fa2l/gephi-toolkit.jar",
                "kco.forceatlas2.Main",
                "--input",
                input_file,
                "--targetChangePerNode",
                "0.5",
                "--output",
                output_template,
                "--directed",
                # "--scalingRatio", "10",
                # "--gravity", "100",
                "--2d"
            ], )

        assert isinstance(output, bytes)
        output = output.decode('utf-8', errors='replace')

        log.info(f"fa2 layout: {str(output)}")

        with open(output_file) as f:
            lines = f.readlines()

        del lines[0]

        for line in lines:
            (i, x, y) = line.split()

            i = int(i)
            x = float(x)
            y = float(y)

            graph.nodes[i]['position'] = [x, y]
Example #38
def add_vertex(graph: nx.Graph, n: int) -> nx.Graph:
    for i in range(n):
        graph.add_node(i)
    return graph
Example #39
def resize(subgraph: list, graph: nx.Graph, min_size: int,
           max_size: int) -> dict:
    """Resize a subgraph to a range of input sizes.

    This function uses a greedy approach to iteratively add or remove nodes one at a time to an
    input subgraph to reach the range of sizes specified by ``min_size`` and ``max_size``.

    When growth is required, the algorithm examines all nodes from the remainder of the graph as
    candidates and adds the single node with the highest degree relative to the rest of the
    subgraph. This results in a graph that is one node larger, and if growth is still required,
    the algorithm performs the procedure again.

    When shrinking is required, the algorithm examines all nodes from within the subgraph as
    candidates and removes the single node with lowest degree relative to the subgraph. In both
    growth and shrink phases, ties for addition/removal with nodes of equal degree are settled by
    uniform random choice.

    **Example usage:**

    >>> s = data.Planted()
    >>> g = nx.Graph(s.adj)
    >>> s = [20, 21, 22, 23, 24, 25, 26, 27, 28, 29]
    >>> resize(s, g, 8, 12)
    {10: [20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
     11: [11, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
     12: [0, 11, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
     9: [20, 21, 22, 24, 25, 26, 27, 28, 29],
     8: [20, 21, 22, 24, 25, 26, 27, 29]}

    Args:
        subgraph (list[int]): a subgraph specified by a list of nodes
        graph (nx.Graph): the input graph
        min_size (int): minimum size for subgraph to be resized to
        max_size (int): maximum size for subgraph to be resized to

    Returns:
        dict[int, list[int]]: a dictionary of different sizes with corresponding subgraph
    """
    nodes = graph.nodes()
    subgraph = set(subgraph)

    if not subgraph.issubset(nodes):
        raise ValueError("Input is not a valid subgraph")
    if min_size < 1:
        raise ValueError("min_size must be at least 1")
    if max_size >= len(nodes):
        raise ValueError("max_size must be less than number of nodes in graph")
    if max_size < min_size:
        raise ValueError("max_size must not be less than min_size")

    starting_size = len(subgraph)

    if min_size <= starting_size <= max_size:
        resized = {starting_size: sorted(subgraph)}
    else:
        resized = {}

    if max_size > starting_size:

        grow_subgraph = graph.subgraph(subgraph).copy()

        while grow_subgraph.order() < max_size:
            grow_nodes = grow_subgraph.nodes()
            complement_nodes = nodes - grow_nodes

            degrees = [(c, graph.subgraph(list(grow_nodes) + [c]).degree()[c])
                       for c in complement_nodes]
            np.random.shuffle(degrees)

            to_add = max(degrees, key=lambda x: x[1])
            grow_subgraph.add_node(to_add[0])

            new_size = grow_subgraph.order()

            if min_size <= new_size <= max_size:
                resized[new_size] = sorted(grow_subgraph.nodes())

    if min_size < starting_size:

        shrink_subgraph = graph.subgraph(subgraph).copy()

        while shrink_subgraph.order() > min_size:
            degrees = list(shrink_subgraph.degree())
            np.random.shuffle(degrees)

            to_remove = min(degrees, key=lambda x: x[1])
            shrink_subgraph.remove_node(to_remove[0])

            new_size = shrink_subgraph.order()

            if min_size <= new_size <= max_size:
                resized[new_size] = sorted(shrink_subgraph.nodes())

    return resized
Example #40
 def metric(graph: Graph) -> Node:
     nodes = list(graph.nodes())
     ranking = (score(graph, node) for node in nodes)
     best = max(ranking, key=lambda r: r.score)
     return best.node
def add_hyperedge_edges(graph: Graph, hyperedge_id: UUID,
                        neighbors: List[Tuple[int, int]]) -> None:
    for neighbor in neighbors:
        graph.add_edge(hyperedge_id, get_node_id(neighbor))
__author__ = 'devashishthakur'
import networkx as nx
from networkx.algorithms import bipartite
from networkx import Graph
import random
from decimal import Decimal
import statistics
import json
import rpyExample

from networkx.readwrite import json_graph

pageRankNodeMap = {}
random_bipartite_graph = Graph()
bipartite_size = 1000
random_bipartite_graph = nx.bipartite_random_graph(bipartite_size,
                                                   bipartite_size, .33)
corrupted_node = {}

x, y = bipartite.sets(random_bipartite_graph)

# number_of_fake_nodes = 5
min_value_x = min(x)
max_value = max(y)
min_value = min(y)

count = 1

degreeMap = nx.degree(random_bipartite_graph)
sortedval = sorted(degreeMap.items(), key=lambda x: x[1], reverse=True)
Example #43
def make_steiner_tree(G, voi, generator=None):
    mst = Graph()
    for v in voi:
        if v not in G:
            raise ValueError("make_steiner_tree(): some vertex not in original graph")
    if len(voi) == 0:
        return mst
    if len(voi) == 1:
        mst.add_node(voi[0])
        return mst

    # Initially, use (a version of) Kruskal's algorithm to extract a minimal spanning tree
    # from a weighted graph.  This algorithm differs in that only a subset of vertices are
    # going to be present in the final subgraph (which is not truly an MST - must use Prim's
    # algorithm later).

    # extract all shortest paths among the voi
    heapq = []
    paths = {}

    # load all the paths between the Steiner vertices. Store them in a heap queue
    # and reconstruct the MST of the complete graph using Kruskal's algorithm
    for i in range(len(voi) - 1):
        v1 = voi[i]
        for v2 in voi[i + 1:]:
            result = bidirectional_dijkstra(G, v1, v2)
            if result == False:
                raise RuntimeError("The two vertices given (%s, %s) don't exist on the same connected graph" % (v1, v2))
            distance, vertList = result
            keys = [v1, v2]
            keys.sort()
            key = "%s:%s" % tuple(keys)
            paths[key] = (vertList)
            heappush(heapq, (distance, v1, v2))

    # construct the minimum spanning tree of the complete graph
    while heapq:
        w, v1, v2 = heappop(heapq)
        # if no path exists yet between v1 and v2, add this one
        if v1 not in mst or v2 not in mst or not has_path(mst, v1, v2):
            mst.add_edge(v1, v2, weight=w)

    # check if the graph is tree and correct
    sTree = set(mst.nodes())
    sSteiner = set(voi)
    if sTree ^ sSteiner:
        raise RuntimeError('Failed to construct MST spanning tree')

    # reconstruct subgraph of origGraph using the paths
    if generator is None:
        subgraph = Graph()
    else:
        subgraph = generator()
    for edge in mst.edges(data=True):
        keys = [edge[0], edge[1]]
        keys.sort()
        key = "%s:%s" % tuple(keys)
        vList = paths[key]
        for i in range(len(vList) - 1):
            v1 = vList[i]
            v2 = vList[i + 1]
            w = G[v1][v2]
            subgraph.add_edge(v1, v2, **w)
    # get rid of possible loops - result will be a true MST
    subgraph = make_prim_mst(subgraph, generator)

    # remove intermediate nodes in paths that are not in list of voi
    return _trimTree(subgraph, voi)
def pick_random_node(tree: nx.Graph) -> int:
    """ Picks a random node from the given graph and returns it. """
    nodes = list(tree.nodes())
    return nodes[random.randint(0, len(nodes) - 1)]
Example #45
def get_neighbors_at(graph: Graph, vertex, layer):
    """
    Returns neighbors of the given `vertex` that lies on the layer `layer`.
    """
    neighbors = list(graph.neighbors(vertex))
    return [v for v in neighbors if graph.nodes[v]['layer'] == layer]
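
A quick illustration of get_neighbors_at, assuming each node carries a 'layer' attribute as the function expects:

import networkx as nx

g = nx.Graph()
g.add_node(1, layer=0)
g.add_node(2, layer=1)
g.add_node(3, layer=1)
g.add_edges_from([(1, 2), (1, 3)])
print(get_neighbors_at(g, 1, 1))  # [2, 3]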
Example #46
            else:
                dist[neighbour_node] = dist[current_node] * graph[
                    current_node][neighbour_node]['weight']
                if dist[neighbour_node] > threshold:
                    if neighbour_node not in path:
                        path[neighbour_node] = list(path[current_node])
                    path[neighbour_node].append(neighbour_node)
                    self.__get_inverted_distance_node(color, graph, dist, path,
                                                      neighbour_node,
                                                      threshold)


if __name__ == '__main__':
    number_nodes = 10
    graph = Graph()
    graph.add_nodes_from(xrange(number_nodes))
    # 2: {'weight': 0.7011249819454258}, 0: {'weight': 0.7011249819454258}
    # f = [{1: {'weight': 0.2881826371807402}, 3: {'weight': 0.34637130968632907}, 4: {'weight': 0.3373179295465066}, 5: {'weight': 0.7030568062038827}, 6: {'weight': 0.6594891222589647}, 7: {'weight': 2.9324258260856147e-06}, 8: {'weight': 0.3423324176201711}, 9: {'weight': 0.5055209844804006}},
    #      {0: {'weight': 0.2881826371807402}, 2: {'weight': 0.20813552209346683}, 3: {'weight': 0.1276147823907101}, 4: {'weight': 0.8396978110965048}, 5: {'weight': 0.4169765394357211}, 6: {'weight': 0.2097257037624973}, 7: {'weight': 0.0012854431017619284}, 8: {'weight': 0.14204922253105837}, 9: {'weight': 0.627459408360586}},
    #      {1: {'weight': 0.20813552209346683}, 3: {'weight': 0.29950731559293586}, 4: {'weight': 0.21276454980590903}, 5: {'weight': 0.454212810905543}, 6: {'weight': 0.5522877977826267}, 7: {'weight': 8.536905374239662e-07}, 8: {'weight': 0.43499464362631024}, 9: {'weight': 0.4123855888378498}}, {0: {'weight': 0.34637130968632907}, 1: {'weight': 0.1276147823907101}, 2: {'weight': 0.29950731559293586}, 4: {'weight': 0.2191517039620457}, 5: {'weight': 0.37785736224977384}, 6: {'weight': 0.422838699175566}, 7: {'weight': 1.5333398181677222e-07}, 8: {'weight': 0.6713928437420265}, 9: {'weight': 0.13113707108395709}}, {0: {'weight': 0.3373179295465066}, 1: {'weight': 0.8396978110965048}, 2: {'weight': 0.21276454980590903}, 3: {'weight': 0.2191517039620457}, 5: {'weight': 0.5177722633063939}, 6: {'weight': 0.24083944074024846}, 7: {'weight': 0.00048113689661720963}, 8: {'weight': 0.25365338097875784}, 9: {'weight': 0.5926547364395025}}, {0: {'weight': 0.7030568062038827}, 1: {'weight': 0.4169765394357211}, 2: {'weight': 0.454212810905543}, 3: {'weight': 0.37785736224977384}, 4: {'weight': 0.5177722633063939}, 6: {'weight': 0.7550023506244243}, 7: {'weight': 2.4634718352311803e-05}, 8: {'weight': 0.492554892126609}, 9: {'weight': 0.38876518211543454}}, {0: {'weight': 0.6594891222589647}, 1: {'weight': 0.2097257037624973}, 2: {'weight': 0.5522877977826267}, 3: {'weight': 0.422838699175566}, 4: {'weight': 0.24083944074024846}, 5: {'weight': 0.7550023506244243}, 7: {'weight': 1.8471292550632583e-06}, 8: {'weight': 0.49849003473179043}, 9: {'weight': 0.20550418983997462}}, {0: {'weight': 2.9324258260856147e-06}, 1: {'weight': 0.0012854431017619284}, 2: {'weight': 8.536905374239662e-07}, 3: {'weight': 1.5333398181677222e-07}, 4: {'weight': 0.00048113689661720963}, 5: {'weight': 2.4634718352311803e-05}, 6: {'weight': 1.8471292550632583e-06}, 8: {'weight': 3.596038724859438e-07}, 9: {'weight': 9.381764527550599e-05}}, {0: {'weight': 0.3423324176201711}, 1: {'weight': 0.14204922253105837}, 2: {'weight': 0.43499464362631024}, 3: {'weight': 0.6713928437420265}, 4: {'weight': 0.25365338097875784}, 5: {'weight': 0.492554892126609}, 6: {'weight': 0.49849003473179043}, 7: {'weight': 3.596038724859438e-07}, 9: {'weight': 0.12563826563961714}}, {0: {'weight': 0.5055209844804006}, 1: {'weight': 0.627459408360586}, 2: {'weight': 0.4123855888378498}, 3: {'weight': 0.13113707108395709}, 4: {'weight': 0.5926547364395025}, 5: {'weight': 0.38876518211543454}, 6: {'weight': 0.20550418983997462}, 7: {'weight': 9.381764527550599e-05}, 8: {'weight': 0.12563826563961714}}]
    f = [{1: {'weight': 0.5}, 2: {'weight': 0.5}}, {3: {'weight': 0.5}}]
    for index, n in enumerate(f):
        for k, v in n.iteritems():
            graph.add_edge(index, k, weight=v['weight'])

    print graph.edges(data=True)
    node = 0
    labeled_nodes = {1: ['a', 'b'], 2: ['a', 'c'], 9: ['a', 'b', 'd']}
    radius = 2
Example #47
class NetworkXGraphBackend(GenericGraphBackend):
    """
    A wrapper for NetworkX as the backend of a graph.

    DOCTEST:
        sage: import sage.graphs.base.graph_backends

    """

    _nxg = None

    def __init__(self, N=None):
        """
        Initialize the backend with NetworkX graph N.
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.iterator_edges([],True)
            <generator object iterator_edges at ...>
        """
        if N is None:
            import networkx
            N = networkx.MultiGraph()
        self._nxg = N

        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

    def add_edge(self, u, v, l, directed):
        """
        Add an edge (u,v) to self, with label l.  If directed is True, this is
        interpreted as an arc from u to v.
        
        INPUT:
            u,v:      vertices
            l:        edge label
            directed: boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.add_edge(1,2,'a',True)
        """

        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        if l:
            self._nxg.add_edge(u, v, weight=l)
        else:
            self._nxg.add_edge(u, v)

    def add_edges(self, edges, directed):
        """
        Add a sequence of edges to self.  If directed is True, these are
        interpreted as arcs.
        
        INPUT:
            edges:    iterator
            directed: boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.add_edges([],True)
        """
        for e in edges:
            try:
                u, v, l = e
            except ValueError:
                u, v = e
                l = None
            self.add_edge(u, v, l, directed)

    def add_vertex(self, name):
        """
        Add a labelled vertex to self.
        
        INPUT:

        - ``name``: vertex label
        
        OUTPUT:

        If ``name`` is ``None``, the new vertex name is returned; otherwise ``None``.

        DOCTEST:

            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.add_vertex(0)
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        retval = None
        if name is None:  # then find an integer to use as a key
            i = 0
            while self.has_vertex(i):
                i = i + 1
            name = i
            retval = name

        self._nxg.add_node(name)

        return retval

    def add_vertices(self, vertices):
        """
        Add labelled vertices to self.

        INPUT:

        - ``vertices``: iterator of vertex labels. A new label is created, used and returned in
          the output list for all ``None`` values in ``vertices``.

        OUTPUT:

        Generated names of new vertices if there is at least one ``None`` value
        present in ``vertices``. ``None`` otherwise.

        EXAMPLES::

            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.add_vertices([1,2,3])
            sage: G.add_vertices([4,None,None,5])
            [0, 6]
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        vertices = list(vertices)
        nones = vertices.count(None)
        vertices = filter(lambda v: v is not None, vertices)
        self._nxg.add_nodes_from(vertices)

        new_names = []
        i = 0
        while nones > 0:
            while self.has_vertex(i):
                i += 1
            self._nxg.add_node(i)
            new_names.append(i)

            nones -= 1
            i += 1

        return new_names if new_names != [] else None

    def degree(self, v, directed):
        """
        Returns the number of edges incident to v.
        
        INPUT:
            v:       a vertex label
            directed: boolean
        OUTPUT:
            degree of v
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.add_vertices(range(3))
            sage: G.degree(1, False)
            0
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.degree(v)

    def del_edge(self, u, v, l, directed):
        """
        Deletes the edge (u,v) with label l.

        INPUT:
            u,v:      vertices
            l:        edge label
            directed: boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.del_edge(1,2,'a',True)
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        import networkx
        try:
            if self._nxg.is_multigraph():
                for k, d in self._nxg.edge[u][v].iteritems():
                    if d.get('weight', None) == l:
                        self._nxg.remove_edge(u, v, k)
                        break
            else:
                if l is None or self._nxg.edge[u][v].get('weight', None) == l:
                    self._nxg.remove_edge(u, v)
        except (KeyError, networkx.NetworkXError):
            pass

    def del_vertex(self, v):
        """
        Delete a labelled vertex in self.
        
        INPUT:
            v: vertex label
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.del_vertex(0)
            Traceback (most recent call last):
            ...
            NetworkXError: The node 0 is not in the graph.
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        self._nxg.remove_node(v)

    def del_vertices(self, vertices):
        """
        Delete labelled vertices in self.
        
        INPUT:
            vertices: iterator of vertex labels
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.del_vertices([1,2,3])
            Traceback (most recent call last):
            ...
            NetworkXError: The node 1 is not in the graph.
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        for v in vertices:
            self._nxg.remove_node(v)

    def get_edge_label(self, u, v):
        """
        Returns the edge label of (u,v).

        INPUT:
            u,v: vertex labels
        
        OUTPUT:
            label of (u,v)
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.get_edge_label(1,2)
            Traceback (most recent call last):
            ...
            NetworkXError: Edge (1,2) requested via get_edge_label does not exist.
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        try:
            E = self._nxg.edge[u][v]
        except KeyError:
            from networkx import NetworkXError
            raise NetworkXError(
                "Edge (%s,%s) requested via get_edge_label does not exist." %
                (u, v))

        if self._nxg.is_multigraph():
            return [e.get('weight', None) for e in E.itervalues()]
        else:
            return E.get('weight', None)

    def has_edge(self, u, v, l):
        """
        True if self has an edge (u,v) with label l.

        INPUT:
            u,v: vertex labels
            l: label
        
        OUTPUT:
            boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.has_edge(1,2,'a')
            False
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        if not self._nxg.has_edge(u, v):
            return False
        if l is None:
            return True
        if self._nxg.is_multigraph():
            return any(
                e.get('weight', None) == l
                for e in self._nxg.adj[u][v].itervalues())
        else:
            return any(e == l for e in self._nxg.adj[u][v].itervalues())

    def has_vertex(self, v):
        """
        True if self has a vertex with label v.

        INPUT:
            v: vertex label
        
        OUTPUT:
            boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.has_vertex(0)
            False
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.has_node(v)

    def iterator_edges(self, vertices, labels):
        """
        Iterate over the edges incident to a sequence of vertices. Edges are
        assumed to be undirected.
        
        INPUT:
            vertices:     a list of vertex labels
            labels:       boolean
            
        OUTPUT:
            a generator which yields edges, with or without labels 
            depending on the labels parameter.
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.iterator_edges([],True)
            <generator object iterator_edges at ...>
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        if labels:
            for u, v, d in self._nxg.edges_iter(data=True):
                if u in vertices or v in vertices:
                    yield (u, v, d.get('weight', None))
        else:
            for u, v in self._nxg.edges_iter():
                if u in vertices or v in vertices:
                    yield (u, v)

    def _iterator_in_edges_with_labels(self, vertices):
        """
        Iterate over the incoming edges incident to a sequence of vertices.
        Special case, only for internal use.

        EXAMPLE::

            sage: g = DiGraph(graphs.PetersenGraph(), implementation="networkx")._backend
            sage: sorted(list(g.iterator_in_edges([0,1], True)))
            [(0, 1, None), (1, 0, None), (2, 1, None), (4, 0, None), (5, 0, None), (6, 1, None)]
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        for u, v, d in self._nxg.in_edges_iter(vertices, data=True):
            yield (u, v, d.get('weight', None))

    def iterator_in_edges(self, vertices, labels):
        """
        Iterate over the incoming edges incident to a sequence of vertices.
        
        INPUT:
            vertices:     a list of vertex labels
            labels:       boolean
            
        OUTPUT:
            a generator which yields edges, with or without labels 
            depending on the labels parameter.
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: i = G.iterator_in_edges([],True)
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        if self._nxg.is_directed():
            if labels:
                return self._iterator_in_edges_with_labels(vertices)
            else:
                return self._nxg.in_edges_iter(vertices)
        else:
            return self.iterator_edges(vertices, labels)

    def _iterator_out_edges_with_labels(self, vertices):
        """
        Iterate over the outbound edges incident to a sequence of vertices.
        Special case, only for internal use.

        EXAMPLE::

            sage: g = DiGraph(graphs.PetersenGraph(), implementation="networkx")._backend
            sage: sorted(list(g.iterator_out_edges([0,1], True)))
            [(0, 1, None), (0, 4, None), (0, 5, None), (1, 0, None), (1, 2, None), (1, 6, None)]
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        for u, v, d in self._nxg.out_edges_iter(vertices, data=True):
            yield (u, v, d.get('weight', None))

    def iterator_out_edges(self, vertices, labels):
        """
        Iterate over the outbound edges incident to a sequence of vertices.
        
        INPUT:
            vertices:     a list of vertex labels
            labels:       boolean
            
        OUTPUT:
            a generator which yields edges, with or without labels 
            depending on the labels parameter.
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: i = G.iterator_out_edges([],True)
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        if self._nxg.is_directed():
            if labels:
                return self._iterator_out_edges_with_labels(vertices)
            else:
                return self._nxg.out_edges_iter(vertices)
        else:
            return self.iterator_edges(vertices, labels)

    def iterator_nbrs(self, v):
        """
        Iterate over the vertices adjacent to v.
        
        INPUT:
            v: vertex label
            
        OUTPUT:
            a generator which yields vertex labels
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.add_vertex(0)
            sage: G.iterator_nbrs(0)
            <dictionary-keyiterator object at ...>
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.neighbors_iter(v)

    def iterator_in_nbrs(self, v):
        """
        Iterate over the vertices u such that the edge (u,v) is in self
        (that is, predecessors of v).
        
        INPUT:
            v: vertex label
            
        OUTPUT:
            a generator which yields vertex labels
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.iterator_in_nbrs(0)
            Traceback (most recent call last):
            ...
            AttributeError: 'MultiGraph' object has no attribute 'predecessors_iter'
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.predecessors_iter(v)

    def iterator_out_nbrs(self, v):
        """
        Iterate over the vertices u such that the edge (v,u) is in self
        (that is, successors of v).
        
        INPUT:
            v: vertex label
            
        OUTPUT:
            a generator which yields vertex labels
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.iterator_out_nbrs(0)
            Traceback (most recent call last):
            ...
            AttributeError: 'MultiGraph' object has no attribute 'successors_iter'
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.successors_iter(v)

    def iterator_verts(self, verts):
        """
        Iterate over the vertices v with labels in verts.
        
        INPUT:
            vertex: vertex labels
            
        OUTPUT:
            a generator which yields vertices
            
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.iterator_verts(0)
            <generator object bunch_iter at ...>
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.nbunch_iter(verts)

    def loops(self, new):
        """
        Get/set whether or not self allows loops.        
        
        INPUT:
            new: boolean or None
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.loops(True)
            sage: G.loops(None)
            True
        """
        if new is None:
            return self._loops
        if new:
            self._loops = True
        else:
            self._loops = False

    def multiple_edges(self, new):
        """
        Get/set whether or not self allows multiple edges.
        
        INPUT:
            new: boolean or None
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.multiple_edges(True)
            sage: G.multiple_edges(None)
            True
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        from networkx import Graph, MultiGraph, DiGraph, MultiDiGraph
        if new is None:
            return self._nxg.is_multigraph()
        if new == self._nxg.is_multigraph():
            return
        if new:
            if self._nxg.is_directed():
                self._nxg = MultiDiGraph(self._nxg)
            else:
                self._nxg = MultiGraph(self._nxg)
        else:
            if self._nxg.is_directed():
                self._nxg = DiGraph(self._nxg)
            else:
                self._nxg = Graph(self._nxg)

    def name(self, new):
        """
        Get/set name of self.
        
        INPUT:
            new: string or None
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.name("A NetworkX Graph")
            sage: G.name(None)
            'A NetworkX Graph'
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        if new is None:
            return self._nxg.name
        self._nxg.name = new

    def num_edges(self, directed):
        """
        The number of edges in self
        
        INPUT:
            directed: boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.num_edges(True)
            0
            sage: G.num_edges(False)
            0
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.size()

    def num_verts(self):
        """
        The number of vertices in self
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.num_verts()
            0
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        return self._nxg.order()

    def relabel(self, perm, directed):
        """
        Relabel the vertices of self by a permutation.

        INPUT:
            perm:     permutation
            directed: boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.relabel([],False)
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        from networkx import relabel_nodes
        name = self._nxg.name
        self._nxg = relabel_nodes(self._nxg, perm)
        self._nxg.name = name

#         if directed:
#             oldsucc = self._nxg.succ
#             oldpred = self._nxg.pred
#             newsucc = {}
#             newpred = {}
#             for v in oldsucc.iterkeys():
#                 oldtempsucc = oldsucc[v]
#                 newtempsucc = {}
#                 for w in oldtempsucc.iterkeys():
#                     newtempsucc[perm[w]] = oldtempsucc[w]
#                 newsucc[perm[v]] = newtempsucc
#             for v in oldpred.iterkeys():
#                 oldtemppred = oldpred[v]
#                 newtemppred = {}
#                 for w in oldtemppred.iterkeys():
#                     newtemppred[perm[w]] = oldtemppred[w]
#                 newpred[perm[v]] = newtemppred
#             self._nxg.adj = newsucc
#             self._nxg.succ = self._nxg.adj
#             self._nxg.pred = newpred
#         else:
#             oldd = self._nxg.adj
#             newd = {}
#             for v in oldd.iterkeys():
#                 oldtempd = oldd[v]
#                 newtempd = {}
#                 for w in oldtempd.iterkeys():
#                     newtempd[perm[w]] = oldtempd[w]
#                 newd[perm[v]] = newtempd
#             self._nxg.adj = newd

    def set_edge_label(self, u, v, l, directed):
        """
        Label the edge (u,v) by l.
        
        INPUT:
            u,v:      vertices
            l:        edge label
            directed: boolean
        
        DOCTEST:
            sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
            sage: G.set_edge_label(1,2,'a',True)
        """
        try:
            assert (not isinstance(
                self._nxg,
                (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)))
        except AssertionError:
            self._nxg = self._nxg.mutate()

        if not self.has_edge(u, v, None):
            return
        if self.multiple_edges(None):
            self._nxg[u][v].clear()
            self._nxg[u][v][0] = dict(weight=l)
            if directed is False:
                self._nxg[v][u].clear()
                self._nxg[v][u][0] = dict(weight=l)
        else:
            self._nxg[u][v]['weight'] = l
            if directed is False:
                self._nxg[v][u]['weight'] = l
Example #48
    def _nx_nodes_to_cypher(self, graph: nx.Graph) -> Iterator[str]:
        """Generates Cypher queries for creating nodes."""
        for nx_id, data in graph.nodes(data=True):
            yield self._create_node(nx_id, data)
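
For context, a hypothetical _create_node that renders one Cypher CREATE statement per node; the Node label and the property formatting are assumptions, not the original helper (which is a method on the source class):

def _create_node(nx_id, data) -> str:
    # Render the networkx attribute dict as Cypher properties (assumed format)
    props = dict(data, id=nx_id)
    body = ", ".join("{}: {!r}".format(k, v) for k, v in props.items())
    return "CREATE (n:Node {{{}}})".format(body)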
Example #49
from networkx import Graph
from dppy.exotic_dpps import UST

# Build graph
g = Graph()
edges = [(0, 2), (0, 3), (1, 2), (1, 4), (2, 3), (2, 4), (3, 4)]
g.add_edges_from(edges)

# Initialize UST object
ust = UST(g)
# Display original graph
ust.plot_graph()
# Display some samples
for _ in range(3):
    ust.sample()
    ust.plot()
# Display the underlying kernel, i.e., the transfer current matrix
ust.plot_kernel()
def average(g: nx.Graph) -> float:
    n = g.number_of_nodes()

    sum_of_weights = sum(g[i][j]['weight'] for i in range(n) for j in range(i))
    return 2 * sum_of_weights / (n - 1)
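
The 2 * sum_of_weights / (n - 1) form follows because every Hamiltonian cycle in a complete graph has n edges and, by symmetry, each of the n(n-1)/2 edges appears equally often across all cycles, so the mean cycle length is n * S / (n(n-1)/2) = 2S / (n-1) for total edge weight S. A quick sanity check with unit weights:

import networkx as nx

g = nx.complete_graph(4)
nx.set_edge_attributes(g, 1.0, 'weight')
assert average(g) == 4.0  # every cycle uses n = 4 unit-weight edges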
Example #51
def create_tgt_tokengraph(dataset,
                          t_vocab,
                          s_vocab,
                          G: nx.Graph = None,
                          window_size: int = 2):
    """ Given a target dataset adds new nodes (occurs only in target domain)
    to existing token Graph. Update t_co count if node already exists.

     Use source vocab [s_vocab] for text to id mapping if exists, else use
      [t_vocab].

     NOTE: This should be called only after create_src_tokengraph() was called
     to create G.

    :param dataset: TorchText dataset
    :param t_vocab: target-domain vocab (dict with 'str2idx_map', 'idx2str_map', 'freqs')
    :param s_vocab: source-domain vocab, same structure as [t_vocab]
    :param G: existing token graph built by create_src_tokengraph()
    :param window_size: sliding window size
    :return: updated graph and the combined str2idx mapping
    """
    """
    ## Raise an error if G does not exist:
    if G is None:
        raise ValueError('This method should be called only after '
                         'create_src_tokengraph() was called to '
                         'create G.')

    combined_s2i = s_vocab['str2idx_map']
    combined_i2s = s_vocab['idx2str_map']
    t_idx_start = len(s_vocab['str2idx_map']) + 1
    ## Add token's id (from s_vocab) as node id to the graph
    for token_str, token_id in t_vocab['str2idx_map'].items():
        if s_vocab['str2idx_map'][token_str] == 0 and token_str != '<unk>':
            # token_id = t_vocab.vocab.stoi[token_str]
            combined_s2i[token_str] = t_idx_start
            # combined_i2s[t_idx_start] = token_str
            combined_i2s.append(token_str)
            # try:
            #     token_emb = glove_embs[token_str]
            # except KeyError:
            #     token_emb = glove_embs['<unk>']
            # G.add_node(token_id, node_txt=token_str, s_co=0, t_co=token_id,
            #            emb=token_emb)
            G.add_node(t_idx_start,
                       node_txt=token_str,
                       s_co=0,
                       t_co=t_vocab['freqs'][token_str])
            t_idx_start = t_idx_start + 1
        # try:  ## Just add t_co value if node exists in G
        # except KeyError:  ## Create new node with s_co = 0 if node not in G
        else:
            G.node[s_vocab['str2idx_map'][token_str]]['t_co'] =\
                t_vocab['freqs'][token_str]

    for txt_toks in dataset:
        j = 0
        txt_len = len(txt_toks)
        if window_size is None or window_size > txt_len:
            window_size = txt_len

        slide = txt_len - window_size + 1

        for k in range(slide):
            txt_window = txt_toks[j:j + window_size]
            ## Co-occurrence in tweet:
            occurrences = find_cooccurrences(txt_window)

            ## Add edges with attribute:
            for token_pair, wt in occurrences.items():
                ## Get token ids from source if exists else from target
                try:
                    token1_id = s_vocab['str2idx_map'][token_pair[0]]
                except KeyError:
                    token1_id = t_vocab['str2idx_map'][token_pair[0]]
                try:
                    token2_id = s_vocab['str2idx_map'][token_pair[1]]
                except KeyError:
                    token2_id = t_vocab['str2idx_map'][token_pair[1]]

                if G.has_edge(token1_id, token2_id):
                    ##  Add value to existing edge if exists:
                    G[token1_id][token2_id]['t_pair'] += wt
                else:  ## Add new edge if not exists and make s_pair = 0
                    G.add_edge(token1_id, token2_id, s_pair=0, t_pair=wt)
            j = j + 1

    return G, combined_s2i
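
The find_cooccurrences helper used above is not shown; a plausible sketch (an assumption, not the original) that counts unordered token pairs within one window:

from collections import Counter
from itertools import combinations

def find_cooccurrences(window_tokens):
    pairs = Counter()
    for t1, t2 in combinations(window_tokens, 2):
        if t1 != t2:
            pairs[tuple(sorted((t1, t2)))] += 1
    return pairs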
def assign_louvain_communities(
    reddit_graph: nx.Graph,
    wiki_graph: nx.Graph = None,
    reddit_edge_weight: str = "count",
    others_threshold: int = 2,
    louvain_resolution_reddit: float = 1,
) -> Union[nx.Graph, Tuple[nx.Graph, nx.Graph]]:
    """ "Calculate communities using the louvain algorithm and assign them as property to the graphs node.
    if two graphs are given, also assign one graph's communities to the other's.


    Args:
        reddit_graph (nx.Graph): Reddit Graph
        wiki_graph (nx.Graph, optional): Wikipedia graph. Defaults to None.
        reddit_edge_weight (str, optional): edge attribute to use for weighting. Defaults to "count".
        others_threshold (int, optional): minimum size of the communities. Communities smaller than this are mapped to "other". Defaults to 2.
        louvain_resolution_reddit (float, optional): granularity for the louvain algorithm on the reddit graph. Defaults to 1
    Returns:
        (reddit_graph, reddit_dendrogram) if only the reddit graph is given,
        otherwise (reddit_graph, reddit_dendrogram, wiki_graph, wiki_dendrogram).
    """
    reddit_dendrogram = community.generate_dendrogram(
        reddit_graph, weight=reddit_edge_weight, resolution=louvain_resolution_reddit
    )
    if wiki_graph:
        wiki_dendrogram = community.generate_dendrogram(
            wiki_graph,
        )

    # Iterate over reddit nodes to assign communities
    for node in reddit_graph:
        # Iterate over all levels of the dendrogram
        for level in range(len(reddit_dendrogram) - 1):
            actual_level = len(reddit_dendrogram) - 2 - level

            partition = community.partition_at_level(reddit_dendrogram, level)

            node_community = partition[node]
            counts = Counter(partition.values())
            if counts[node_community] < others_threshold:
                node_community = -1
            reddit_graph.nodes[node][
                f"louvain_community_reddit_R{louvain_resolution_reddit:.2f}_L{actual_level}"
            ] = f"L{actual_level}-{node_community:03}"
        if wiki_graph:
            # Also add the community from the other graph to allow comparing
            # Again, iterate over all levels in the dendrogram
            for level in range(len(wiki_dendrogram) - 1):
                actual_level = len(wiki_dendrogram) - 2 - level

                partition = community.partition_at_level(wiki_dendrogram, level)

                try:
                    node_community = partition[node]
                    counts = Counter(partition.values())
                    if counts[node_community] < others_threshold:
                        node_community = -1

                    reddit_graph.nodes[node][
                        f"louvain_community_wiki_L{actual_level}"
                    ] = f"L{actual_level}-{node_community:03}"

                except KeyError:  # node absent from the wiki partition
                    reddit_graph.nodes[node][
                        f"louvain_community_wiki_L{level}"
                    ] = f"L{level}-NONE"
    if wiki_graph:
        for node in wiki_graph:
            for level in range(len(wiki_dendrogram) - 1):
                actual_level = len(wiki_dendrogram) - 2 - level

                partition = community.partition_at_level(wiki_dendrogram, level)
                node_community = partition[node]

                counts = Counter(partition.values())
                if counts[node_community] < others_threshold:
                    node_community = -1

                wiki_graph.nodes[node][
                    f"louvain_community_wiki_L{actual_level}"
                ] = f"L{actual_level}-{node_community:03}"
            # Also add the community from the other graph to allow comparing

            for level in range(len(reddit_dendrogram) - 1):
                actual_level = len(reddit_dendrogram) - 2 - level

                partition = community.partition_at_level(reddit_dendrogram, level)

                try:
                    node_community = partition[node]

                    counts = Counter(partition.values())
                    if counts[node_community] < others_threshold:
                        node_community = -1
                    wiki_graph.nodes[node][
                        f"louvain_community_reddit_R{louvain_resolution_reddit:.2f}_L{actual_level}"
                    ] = f"L{actual_level}-{node_community:03}"
                except KeyError:  # node absent from the reddit partition
                    wiki_graph.nodes[node][
                        f"louvain_community_reddit_R{louvain_resolution_reddit:.2f}_L{level}"
                    ] = f"L{level}-NONE"

    return (
        (reddit_graph, reddit_dendrogram, wiki_graph, wiki_dendrogram)
        if wiki_graph
        else (reddit_graph, reddit_dendrogram)
    )
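
A minimal usage sketch, assuming `community` here is the python-louvain package and assign_louvain_communities is in scope; the "count" edge attribute mirrors the default reddit_edge_weight.

import networkx as nx

reddit = nx.karate_club_graph()
nx.set_edge_attributes(reddit, 1, "count")
graph, dendrogram = assign_louvain_communities(reddit)
print(graph.nodes[0])  # now carries louvain_community_reddit_R1.00_L* keys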
Example #53
File: main.py  Project: DobroAlex/4kurs
def do_visit(
        G: nx.Graph,
        agent: Agent.Agent,
        is_node_visited_only_once: bool = False,
        start_node: int = None,
        is_animated: bool = True,
        path_to_save_yandex_animation: str = "output/animated_map/frames/",
        path_to_save_matplotlib_animation: str = "output/matplotlib_animated_map/frames/",
        is_using_strict_order: bool = False) -> None:
    iteration = 0

    path_to_matplotlib_frames = os.path.join(path_to_save_matplotlib_animation,
                                             "frames/")
    path_to_yandex_frames = os.path.join(path_to_save_yandex_animation,
                                         "frames/")
    is_first_visit = True
    if start_node is None:
        # node_to_visit  = random.randint(0, G.__len__() - 1)
        node_to_visit = random.randint(0, G.order() - 1)
        if is_using_strict_order:
            node_to_visit = 0  # Using first node. Node 0 must always be present and connected with at least one other
    else:
        node_to_visit = start_node
    while not GU.is_all_nodes_visited(G):
        if is_node_visited_only_once:
            while True:
                if is_using_strict_order:
                    break
                if G.nodes[node_to_visit][
                        'data'].state == PSE.possible_state.not_visited:
                    break
                else:
                    node_to_visit = random.randint(0, G.order() - 1)
        elif not is_node_visited_only_once:
            node_to_visit = random.randint(0, G.order() - 1)
        if is_using_strict_order:  # Causes the agent to visit nodes strictly in ascending order of node numbers.
            if not is_first_visit:
                node_to_visit += 1
                if node_to_visit not in G.nodes:
                    break  # node with max index already visited
        print("Now in node #{0} : {1}".format(node_to_visit,
                                              G.nodes[node_to_visit]))
        agent.visited_nodes.append(node_to_visit)
        for target_person in G.nodes[node_to_visit]['data'].persons:
            # calculating probability that agent will infect persons
            for disease_of_agent, disease_of_agent_permissibility in agent.infected_with.items(
            ):  # iterating over
                # dict's items
                # https://stackoverflow.com/questions/5466618/too-many-values-to-unpack-iterating-over-a-dict-key-string-value-list
                probability = GU.calc_infection_probability(
                    Infection.infection(
                        name=disease_of_agent,
                        permissibility=disease_of_agent_permissibility),
                    target_person, G.nodes[node_to_visit]
                    ['data'].persons)  # TODO : TEST THIS LINE
                # print("For agent {0} and target {1} in place {2}, probability of {3} = {4}".format(agent, target_person,
                #                                                                                   G.nodes[
                ##                                                                                       node_to_visit][
                #                                                                                      'data'].name,
                #                                                                                  disease_of_agent,
                #                                                                                   probability))
                if probability >= 0.5:
                    target_person.infected_with[
                        disease_of_agent] = agent.infected_with[
                            disease_of_agent]
                    G.nodes[node_to_visit][
                        'data'].state = PSE.possible_state.infected
                else:
                    if G.nodes[node_to_visit][
                            'data'].state != PSE.possible_state.infected:
                        G.nodes[node_to_visit][
                            'data'].state = PSE.possible_state.not_infected
            for disease_of_person, disease_of_person_permissibility in target_person.infected_with.items(
            ):  # calculating probability that person will infect agent with something new
                if disease_of_person in agent.infected_with:
                    break
                probability = GU.calc_infection_probability(
                    Infection.infection(disease_of_person,
                                        disease_of_person_permissibility),
                    agent, G.nodes[node_to_visit]['data'].persons)
                if probability >= 0.5:
                    agent.infected_with[
                        disease_of_person] = disease_of_person_permissibility
        GU.graph_show_and_save(
            G,
            name_to_save="frame" +
            str(len(next(os.walk(path_to_matplotlib_frames))[2])),
            path_to_save=path_to_matplotlib_frames,
            to_save=True,
            text="Graph after agent interference in node {0}, agent : {1}".
            format(
                G.nodes[node_to_visit]['data'].name +
                str(G.nodes[node_to_visit]['data'].number + 1), agent))
        GU.get_map(G,
                   agent,
                   name_to_save="frame" +
                   str(len(next(os.walk(path_to_yandex_frames))[2])) + ".png",
                   path_to_save=path_to_yandex_frames)
        infection_tick(G)
        GU.graph_show_and_save(
            G,
            name_to_save="frame" +
            str(len(next(os.walk(path_to_matplotlib_frames))[2])),
            path_to_save=path_to_matplotlib_frames,
            to_save=True,
            text="Graph after in-node interference")
        GU.get_map(G,
                   agent,
                   name_to_save="frame" +
                   str(len(next(os.walk(path_to_yandex_frames))[2])) + ".png",
                   path_to_save=path_to_yandex_frames)
        prev_node = node_to_visit
        iteration += 1

        if is_first_visit:
            is_first_visit = False
Example #54
def clique_merge(graph: nx.Graph, report=False) -> nx.Graph:
    """
    Builds up cliques using the `same_as` attribute of each node. Uses those
    cliques to build up a mapping for relabelling nodes. Chooses labels so as
    to preserve the original nodes, rather than taking xrefs that don't appear
    as nodes in the graph.

    This method will also expand the `same_as` attribute of the nodes to
    include the discovered clique.
    """
    original_size = len(graph)
    print('original graph has {} nodes'.format(original_size))

    cliqueGraph = nx.Graph()

    with click.progressbar(
            graph.nodes(data=True),
            label='building cliques from same_as node property') as bar:
        for n, attr_dict in bar:
            if 'same_as' in attr_dict:
                for m in attr_dict['same_as']:
                    cliqueGraph.add_edge(n, m)

    with click.progressbar(graph.edges(data=True),
                           label='building cliques from same_as edges') as bar:
        for u, v, attr_dict in bar:
            if 'edge_label' in attr_dict and attr_dict[
                    'edge_label'] == 'same_as':
                cliqueGraph.add_edge(u, v)

    edges = []
    with click.progressbar(cliqueGraph.edges(),
                           label='Breaking invalid cliques') as bar:
        for u, v in bar:
            try:
                u_categories = graph.node[u].get('category', [])
                v_categories = graph.node[v].get('category', [])
            except KeyError:  # one of the endpoints is not a node in graph
                continue
            l = len(edges)
            for a in u_categories:
                if len(edges) > l:
                    break
                if get_toolkit().get_element(a) is None:
                    continue
                for b in v_categories:
                    if get_toolkit().get_element(b) is None:
                        continue
                    a_ancestors = get_toolkit().ancestors(a)
                    b_ancestors = get_toolkit().ancestors(b)
                    if a_ancestors == b_ancestors == []:
                        continue
                    elif a not in b_ancestors and b not in a_ancestors:
                        edges.append((u, v))
                        break

    print('breaking {} edges'.format(len(edges)))
    cliqueGraph.remove_edges_from(edges)

    mapping = {}

    connected_components = list(nx.connected_components(cliqueGraph))

    print('Discovered {} cliques'.format(len(connected_components)))

    with click.progressbar(connected_components,
                           label='building mapping') as bar:
        for nodes in bar:
            nodes = list(nodes)
            categories = set()
            for n in nodes:
                if not graph.has_node(n):
                    continue

                attr_dict = graph.node[n]

                attr_dict['same_as'] = nodes

                if 'category' in attr_dict:
                    categories.update(listify(attr_dict['category']))

                if 'categories' in attr_dict:
                    categories.update(listify(attr_dict['categories']))

            list_of_prefixes = []
            for category in categories:
                try:
                    list_of_prefixes.append(
                        get_toolkit().get_element(category).id_prefixes)
                except AttributeError:  # element missing or has no id_prefixes
                    pass

            nodes.sort()
            nodes.sort(key=build_sort_key(list_of_prefixes))

            for n in nodes:
                if n != nodes[0]:
                    mapping[n] = nodes[0]

    g = relabel_nodes(graph, mapping)

    edges = []
    for u, v, key, data in g.edges(keys=True, data=True):
        if data.get('edge_label') == 'same_as':
            edges.append((u, v, key))
    g.remove_edges_from(edges)

    for n, data in g.nodes(data=True):
        data['iri'] = expand_uri(n)
        if 'id' in data and data['id'] != n:
            data['id'] = n
        if 'same_as' in data and n in data['same_as']:
            data['same_as'].remove(n)
            if data['same_as'] == []:
                del data['same_as']

    final_size = len(g)
    print('Resulting graph has {} nodes'.format(final_size))
    print('Eliminated {} nodes'.format(original_size - final_size))

    return g
Example #55
    def convert_nx_to_dgl(self, G: nx.Graph) -> dgl.DGLGraph:
        """
        Converts ``NetworkX`` graph to ``DGL``

        :param G: ``nx.Graph`` to convert to ``DGLGraph``
        :type G: nx.Graph
        :return: ``DGLGraph`` object version of input ``NetworkX`` graph
        :rtype: dgl.DGLGraph
        """
        g = dgl.DGLGraph()
        node_id = [n for n in G.nodes()]
        G = nx.convert_node_labels_to_integers(G)

        ## add node level feat

        node_dict = {}
        for i, (_, feat_dict) in enumerate(G.nodes(data=True)):
            for key, value in feat_dict.items():
                if str(key) in self.columns:
                    node_dict[str(key)] = ([value] if i == 0 else
                                           node_dict[str(key)] + [value])

        string_dict = {}
        node_dict_transformed = {}
        for i, j in node_dict.items():
            if i == "coords":
                node_dict_transformed[i] = torch.Tensor(
                    np.asarray(j)).type("torch.FloatTensor")
            elif i == "dist_mat":
                node_dict_transformed[i] = torch.Tensor(np.asarray(
                    j[0].values)).type("torch.FloatTensor")
            elif self.type2form[i] == "str":
                string_dict[i] = j
            elif self.type2form[i] == "float":
                node_dict_transformed[i] = torch.Tensor(np.array(j))
            elif self.type2form[i] == "int":
                node_dict_transformed[i] = torch.Tensor(np.array(j))

        g.add_nodes(
            len(node_id),
            node_dict_transformed,
        )

        edge_dict = {}
        edge_index = torch.LongTensor(list(G.edges)).t().contiguous()

        # add edge level features
        for i, (_, _, feat_dict) in enumerate(G.edges(data=True)):
            for key, value in feat_dict.items():
                if str(key) in self.columns:
                    edge_dict[str(key)] = (list(value) if i == 0 else
                                           edge_dict[str(key)] + list(value))

        edge_transform_dict = {}
        for i, j in edge_dict.items():  # iterate edge features here, not node features
            if self.type2form[i] == "str":
                string_dict[i] = j
            elif self.type2form[i] == "float":
                edge_transform_dict[i] = torch.Tensor(np.array(j))
            elif self.type2form[i] == "int":
                edge_transform_dict[i] = torch.Tensor(np.array(j))

        g.add_edges(edge_index[0], edge_index[1], edge_transform_dict)

        # add graph level features
        graph_dict = {}
        for i, feat_name in enumerate(G.graph):
            if str(feat_name) in self.columns:
                graph_dict[str(feat_name)] = [G.graph[feat_name]]

        return g
def brute_force(g: nx.Graph) -> int:
    n = g.number_of_nodes()
    cycles = [cycle_length(g, p) for p in permutations(range(n))]
    return min(cycles)
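
brute_force assumes `permutations` from itertools and a cycle_length helper; one plausible definition (an assumption) treats the permutation as a Hamiltonian cycle that wraps around:

from itertools import permutations  # used by brute_force above

def cycle_length(g, p) -> float:
    n = len(p)
    return sum(g[p[i]][p[(i + 1) % n]]['weight'] for i in range(n))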
Example #57
File: shape.py  Project: rohan/elbridge
def create_block_graph(block_config, block_groups: nx.Graph) -> nx.Graph:
    """Using a block group graph as a base, build a block graph."""

    indir = block_config.get("directory", "wa-blocks")
    infile = block_config.get("filename", "blocks.shp")

    draw_shapefile = block_config.get("draw_shapefile", False)
    draw_graph = block_config.get("draw_graph", False)

    pickle = block_config.get("pickle_graph", True)

    reload_graph = block_config.get("reload_graph", False)

    if not reload_graph:
        if os.path.exists(
                os.path.join(indir, infile + ".annotated_graph.pickle")):
            return nx.read_gpickle(
                os.path.join(indir, infile + ".annotated_graph.pickle"))
        elif os.path.exists(os.path.join(indir, infile + ".graph.pickle")):
            return nx.read_gpickle(
                os.path.join(indir, infile + ".graph.pickle"))

    G = nx.Graph()
    # block group --> list of vertices in that block group
    blocks_per_block_group = defaultdict(list)

    with cd(indir):
        with fiona.open(infile) as blocks:
            for shp in tqdm(blocks, "Reading blocks from shapefile"):
                geo_id = shp['properties'].get('GEOID10')
                # name = shp['properties'].get('NAME10', "Block " + str(idx))
                block_obj = shape(shp['geometry'])
                G.add_node(geo_id, shape=block_obj)

                # GEOID of block == GEOID of block group + block ID
                block_group = geo_id[:-3]
                blocks_per_block_group[block_group].append(geo_id)

    if draw_shapefile:
        plot_shapes([n[1]['shape'] for n in G.nodes(data=True)])

    for i in tqdm(block_groups.nodes(), "Building block group subgraphs"):
        _connect_subgraph(G,
                          blocks_per_block_group[i],
                          blocks_per_block_group[i],
                          same=True)

    for i, j in tqdm(block_groups.edges(),
                     "Building cross-block group subgraphs"):
        _connect_subgraph(G, blocks_per_block_group[i],
                          blocks_per_block_group[j])

    if draw_graph:
        pos = {
            n[0]: [n[1]['shape'].centroid.x, n[1]['shape'].centroid.y]
            for n in G.nodes(data=True)
        }
        nx.draw_networkx(G, pos=pos)
        plt.show()

    if pickle:
        nx.write_gpickle(G, os.path.join(indir, infile + ".graph.pickle"))

    return G
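
The _connect_subgraph helper is assumed above; a plausible sketch (not the original) that joins blocks whose shapes touch, checking each unordered pair once when both lists come from the same block group:

def _connect_subgraph(G, nodes_a, nodes_b, same=False):
    for idx, a in enumerate(nodes_a):
        candidates = nodes_a[idx + 1:] if same else nodes_b
        for b in candidates:
            if G.nodes[a]['shape'].touches(G.nodes[b]['shape']):
                G.add_edge(a, b)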
Example #58
def test_several_communities():
    test = Graph()

    # c1
    test.add_edge('1a', '1b')
    test.add_edge('1a', '1c')
    test.add_edge('1b', '1c')

    # c2
    test.add_edge('2a', '2b')
    test.add_edge('2a', '2c')
    test.add_edge('2b', '2c')

    # c3
    test.add_edge('3a', '3b')
    test.add_edge('3a', '3c')
    test.add_edge('3b', '3c')

    # c4
    test.add_edge('4a', '4b')
    test.add_edge('4a', '4c')
    test.add_edge('4b', '4c')

    # c5
    test.add_edge('5a', '5b')
    test.add_edge('5a', '5c')
    test.add_edge('5b', '5c')

    # ground truth
    ground_truth = set([
        frozenset(['1a', '1c', '1b']),
        frozenset(['2a', '2c', '2b']),
        frozenset(['3a', '3c', '3b']),
        frozenset(['4a', '4c', '4b']),
        frozenset(['5a', '5c', '5b'])
    ])

    communities = asyn_lpa.asyn_lpa_communities(test)
    result = {frozenset(c) for c in communities}
    assert_equal(result, ground_truth)
Example #59
File: map.py  Project: esirK/mediacloud
def get_giant_component(graph: nx.Graph) -> nx.Graph:
    """Return the giant component subgraph of the graph."""
    components = sorted(nx.connected_components(graph), key=len)

    return graph.subgraph(components[-1]) if len(components) > 0 else graph
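
A quick check on a graph with two components:

import networkx as nx

g = nx.Graph([(1, 2), (2, 3), (10, 11)])
print(sorted(get_giant_component(g).nodes()))  # [1, 2, 3]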
Example #60
def _analyze_skeleton(project_id, skeleton_id, adjacents):
    """ Takes a skeleton and returns a list of potentially problematic issues,
    as a list of tuples of two values: issue type and treenode ID.
    adjacents: the number of nodes in the paths starting at a node when checking for duplicated connectors.
    """
    project_id = int(project_id)
    skeleton_id = int(skeleton_id)
    cursor = connection.cursor()

    PRE = 'presynaptic_to'
    POST = 'postsynaptic_to'

    # Retrieve relation IDs vs names
    cursor.execute('''
    SELECT id, relation_name
    FROM relation
    WHERE project_id = %s
      AND (relation_name = '%s'
           OR relation_name = '%s')
    ''' % (project_id, PRE, POST))

    relations = {} # both ways
    for row in cursor.fetchall():
        relations[row[0]] = row[1]
        relations[row[1]] = row[0]

    # Transform strings to integer IDs
    PRE = relations[PRE]
    POST = relations[POST]

    # Retrieve all connectors and their associated pre- or postsynaptic treenodes,
    # plus the parent treenodes of these.
    cursor.execute('''
    SELECT tc1.connector_id,
           tc1.relation_id,
           t1.id,
           t1.skeleton_id,
           tc2.relation_id,
           t2.id,
           t2.skeleton_id
    FROM treenode_connector tc1,
         treenode_connector tc2,
         treenode t1,
         treenode t2
    WHERE tc1.skeleton_id = %s
      AND tc1.connector_id = tc2.connector_id
      AND tc1.treenode_id = t1.id
      AND tc2.treenode_id = t2.id
      AND (tc1.relation_id = %s OR tc1.relation_id = %s)
      AND (tc2.relation_id = %s OR tc2.relation_id = %s)
    ''', (skeleton_id, PRE, POST, PRE, POST))

    Treenode = namedtuple('Treenode', ['id', 'skeleton_id'])

    # Map of connector_id vs {pre: {Treenode, ...}, post: {Treenode, ...}}
    connectors = defaultdict(partial(defaultdict, set))

    # Condense rows to connectors represented by a map with two entries (PRE and POST),
    # each containing as value a set of Treenode:
    for row in cursor.fetchall():
        s = connectors[row[0]]
        s[row[1]].add(Treenode(row[2], row[3]))
        # The 'other' could be null
        if row[4]:
            s[row[4]].add(Treenode(row[5], row[6]))

    issues = []

    # Set of IDs of outgoing connectors
    pre_connector_ids = set()

    for connector_id, connector in connectors.items():
        pre = connector[PRE]
        post = connector[POST]
        if pre and post:
            for a in pre:
                for b in post:
                    if a.skeleton_id == b.skeleton_id:
                        # Type 0: autapse
                        issues.append((0, a.id if a.skeleton_id == skeleton_id else b.id))
        if not post:
            # Type 2: presynaptic connector without postsynaptic treenodes
            issues.append((2, next(iter(pre)).id))
        if not pre:
            # Type 3: postsynaptic connector without presynaptic treenode
            issues.append((3, next(iter(post)).id))
        else:
            if next(iter(pre)).skeleton_id != skeleton_id:
                repeats = tuple(t.id for t in post if t.skeleton_id == skeleton_id)
                if len(repeats) > 1:
                    # Type 1: two or more times postsynaptic to the same connector
                    issues.append((1, repeats[0]))
            else:
                pre_connector_ids.add(connector_id)

    # Fetch data for type 4 and 5: all treenode, with tags if any
    cursor.execute('''
    SELECT treenode.id,
           treenode.parent_id,
           class_instance.name
    FROM treenode
             LEFT OUTER JOIN
                 (treenode_class_instance
                  INNER JOIN relation
                      ON (treenode_class_instance.relation_id = relation.id
                          AND relation.relation_name = 'labeled_as')
                  INNER JOIN class_instance
                      ON (treenode_class_instance.class_instance_id = class_instance.id))
             ON (treenode_class_instance.treenode_id = treenode.id)
    WHERE treenode.skeleton_id = %s
    ''', (skeleton_id,))

    # Collapse repeated rows into nodes with none or more tags
    nodes = {}
    parents = set()
    root = None
    for row in cursor.fetchall():
        node = nodes.get(row[0])
        if node:
            # Append tag
            node[1].append(row[2])
        else:
            nodes[row[0]] = (row[1], [row[2]])

        if row[1]:
            parents.add(row[1])
        else:
            root = row[0]
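    # nodes now maps treenode ID -> (parent_id, [tag, ...]); untagged nodes
    # carry a single None tag from the LEFT OUTER JOIN, and a null parent
    # marks the root.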


    # Type 4: potentially duplicated synapses (or triplicated, etc):
    # Check if two or more connectors share pre treenodes and post skeletons,
    # or pre skeletons and post treenodes,
    # considering the treenode and its parent as a group.
    if adjacents > 0:
        graph = Graph()
        for node_id, props in nodes.items():
            if props[0]:
                # Nodes are added automatically
                graph.add_edge(props[0], node_id)
    else:
        graph = None

    Connector = namedtuple("Connector", ['id', 'treenode_id', 'treenodes', 'skeletons'])

    # Check if there are any duplicated presynaptic connectors
    pre_connectors = []
    for connector_id in pre_connector_ids:
        c = connectors[connector_id]
        treenode_id = next(iter(c[PRE])).id
        if graph is not None:
            pre_treenodes = set(chain.from_iterable(
                single_source_shortest_path(graph, treenode_id, adjacents).values()))
        else:
            # No graph was built when adjacents == 0: the group is the node itself.
            pre_treenodes = {treenode_id}
        post_skeletons = set(t.skeleton_id for t in c[POST])
        pre_connectors.append(Connector(connector_id, treenode_id, pre_treenodes, post_skeletons))

    def issue4s(cs):
        for i, c1 in enumerate(cs):
            for c2 in islice(cs, i+1, None):
                if (c1.treenodes & c2.treenodes) and (c1.skeletons & c2.skeletons):
                    # Type 4: potentially duplicated connector
                    issues.append((4, c1.treenode_id))
                    if c1.treenode_id != c2.treenode_id:
                        issues.append((4, c2.treenode_id))

    issue4s(pre_connectors)

    # Check if there are any duplicated postsynaptic connectors
    post_connectors = []
    for connector_id, c in connectors.items():
        if connector_id in pre_connector_ids:
            continue
        treenode_id = next(t.id for t in c[POST] if t.skeleton_id == skeleton_id)
        pre_skeletons = set(t.skeleton_id for t in c[PRE])
        if graph is not None:
            post_treenodes = set(chain.from_iterable(
                single_source_shortest_path(graph, treenode_id, adjacents).values()))
        else:
            post_treenodes = {treenode_id}
        post_connectors.append(Connector(connector_id, treenode_id, post_treenodes, pre_skeletons))

    issue4s(post_connectors)


    # Type 5: end node without a tag
    # Type 6: node with a TODO tag
    # Type 7: root, slab or branch node with a tag like 'ends', 'not a branch', 'uncertain end', or 'uncertain continuation'
    end_labels = set(['ends', 'not a branch', 'uncertain end', 'uncertain continuation', 'soma', 'nerve out'])
    if root in parents:
        parents.remove(root) # Consider the root as a leaf node
    for node_id, props in nodes.items():
        labels = set(props[1])
        if node_id not in parents:
            if not (labels & end_labels):
                # Type 5: node is a leaf without an end-node label
                issues.append((5, node_id))
        elif labels & end_labels:
            # Type 7: node is not a leaf but has an end-node label
            issues.append((7, node_id))
        if 'TODO' in labels:
            # Type 6: node with a tag containing the string 'TODO'
            issues.append((6, node_id))

    return issues
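To make the type-4 test concrete, here is a self-contained illustration with fabricated Connector tuples: two connectors are flagged when their treenode groups and their partner-skeleton sets both overlap.

from collections import namedtuple

Connector = namedtuple("Connector", ['id', 'treenode_id', 'treenodes', 'skeletons'])

# Hypothetical data: connectors 101 and 102 share treenode 8 and skeleton 42.
c1 = Connector(101, 7, {7, 8}, {42})
c2 = Connector(102, 8, {8, 9}, {42, 43})
duplicated = bool(c1.treenodes & c2.treenodes) and bool(c1.skeletons & c2.skeletons)
print(duplicated)  # True -> treenodes 7 and 8 would both be reported as issue type 4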