def netStats(opt_fname,dummy_fname):
    print('Evaluating statistics from %s,%s' % (opt_fname, dummy_fname))

    opt_tab=[a.strip().split('\tpp\t') for a in open(opt_fname,'r').readlines()]
    dummy_tab=[[a.strip().split('\t')[0],a.strip().split('\t')[2]] for a in open(dummy_fname,'r').readlines()]


    stats={}
    ##get optimal number of edges
    opt_g=networkx.from_edgelist([(a[0],a[1]) for a in opt_tab])
    dum_g=networkx.from_edgelist([(a[0],a[1]) for a in dummy_tab])

    stats['numEdges'] = len(opt_tab)
    ##get number of trees (connected components of the optimal graph)
    stats['numTrees'] = networkx.number_connected_components(opt_g)

    conn_comps=networkx.connected_components(opt_g)

    ##number of nodes
    nodes=opt_g.nodes()
    stats['numNodes']=len(nodes)

    ##get tree sizes
    stats['treeSizes']=','.join([str(len(c)) for c in sorted(conn_comps, key=len, reverse=True)])

    ##find ubiquitin?
    if 'UBC' in nodes:
        stats['hasUBC'] = 'True'
    else:
        stats['hasUBC'] = 'False'

    return stats
    def _addNonbondedForceToSystem(self, sys, verbose):
        '''Create the nonbonded force
        '''
        nb = mm.NonbondedForce()
        sys.addForce(nb)

        q = '''SELECT charge, sigma, epsilon
        FROM particle INNER JOIN nonbonded_param
        ON particle.nbtype=nonbonded_param.id'''
        for charge, sigma, epsilon in self._conn.execute(q):
            nb.addParticle(charge, sigma*angstrom, epsilon*kilocalorie_per_mole)

        if verbose:
            # Bond graph (for debugging)
            g = nx.from_edgelist(self._conn.execute('SELECT p0, p1 FROM stretch_harm_term').fetchall())
            nbnames = {1: '1-2', 2:'1-3', 3:'1-4'}

        q = '''SELECT p0, p1, aij, bij, qij
        FROM pair_12_6_es_term INNER JOIN pair_12_6_es_param
        ON pair_12_6_es_term.param=pair_12_6_es_param.id;'''
        for p0, p1, a_ij, b_ij, q_ij in self._conn.execute(q):
            if verbose:
                l = nx.algorithms.shortest_path_length(g, p0, p1)
                print('Scaling interaction for a %d-%d (%s) interaction' % (p0, p1, nbnames[l]))
            a_ij = (a_ij*kilocalorie_per_mole*(angstrom**12)).in_units_of(kilojoule_per_mole*(nanometer**12))
            b_ij = (b_ij*kilocalorie_per_mole*(angstrom**6)).in_units_of(kilojoule_per_mole*(nanometer**6))
            q_ij = q_ij*elementary_charge**2

            if (b_ij._value == 0.0) or (a_ij._value == 0.0):
                new_epsilon = 0
                new_sigma = 1
            else:
                new_epsilon =  b_ij**2/(4*a_ij)
                new_sigma = (a_ij / b_ij)**(1.0/6.0)
            nb.addException(p0, p1, q_ij, new_sigma, new_epsilon)

        n_total = self._conn.execute('''SELECT COUNT(*) FROM pair_12_6_es_term''').fetchone()
        n_in_exclusions= self._conn.execute('''SELECT COUNT(*)
        FROM exclusion INNER JOIN pair_12_6_es_term
        ON exclusion.p0==pair_12_6_es_term.p0 AND exclusion.p1==pair_12_6_es_term.p1''').fetchone()
        if not n_total == n_in_exclusions:
            raise NotImplementedError('All pair_12_6_es_terms must have a corresponding exclusion')

        # Desmond puts scaled 1-4 interactions in the pair_12_6_es
        # table, and then adds a corresponding exception here. We are
        # using the exception part of NonbondedForce, so we're just
        # adding the 1-4 interaction as an exception when its
        # registered, and then NOT registering it as an exception here.
        q = '''SELECT E.p0, E.p1
        FROM exclusion E LEFT OUTER JOIN pair_12_6_es_term P ON
        E.p0 = P.p0 and E.p1 = P.p1
        WHERE P.p0 is NULL'''
        # http://stackoverflow.com/questions/5464131/finding-pairs-that-do-not-exist-in-a-different-table
        for p0, p1 in self._conn.execute(q):
            if verbose:
                l = nx.algorithms.shortest_path_length(g, p0, p1)
                print('Creating exception for a %d-%d (%s) interaction' % (p0, p1, nbnames[l]))
            nb.addException(p0, p1, 0.0, 1.0, 0.0)

        return nb
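# The sigma/epsilon conversion above follows from writing the Lennard-Jones
# pair term as V(r) = a/r**12 - b/r**6 = 4*eps*((sigma/r)**12 - (sigma/r)**6),
# so a = 4*eps*sigma**12 and b = 4*eps*sigma**6. A minimal, unit-free sketch
# (plain floats rather than OpenMM quantities) checking the round trip:
def _lj_ab_to_sigma_epsilon(a_ij, b_ij):
    # degenerate pairs get a dummy sigma and zero epsilon, as in the code above
    if a_ij == 0.0 or b_ij == 0.0:
        return 1.0, 0.0
    return (a_ij / b_ij) ** (1.0 / 6.0), b_ij ** 2 / (4 * a_ij)

sigma, eps = 0.3, 0.5
a, b = 4 * eps * sigma ** 12, 4 * eps * sigma ** 6
assert abs(_lj_ab_to_sigma_epsilon(a, b)[0] - sigma) < 1e-12
assert abs(_lj_ab_to_sigma_epsilon(a, b)[1] - eps) < 1e-12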
Example #3
def broken_faces(mesh, color=None):
    """
    Return the index of faces in the mesh which break the
    watertight status of the mesh.

    Parameters
    --------------
    mesh: Trimesh object
    color: (4,) uint8, will set broken faces to this color
           None,       will not alter mesh colors

    Returns
    ---------------
    broken: (n, ) int, indexes of mesh.faces
    """
    adjacency = nx.from_edgelist(mesh.face_adjacency)
    broken = [k for k, v in dict(adjacency.degree()).items()
              if v != 3]
    broken = np.array(broken)
    if color is not None:
        # if someone passed a broken color
        color = np.array(color)
        if not (color.shape == (4,) or color.shape == (3,)):
            color = [255, 0, 0, 255]
        mesh.visual.face_colors[broken] = color
    return broken
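# A tiny self-contained sketch of the degree-3 rule above, using hypothetical
# face-adjacency pairs instead of a Trimesh object: the four faces of a
# tetrahedron are each adjacent to the other three, so nothing is reported as
# broken until one adjacency pair is dropped.
import networkx as nx

tet_adjacency = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
closed = nx.from_edgelist(tet_adjacency)
assert all(d == 3 for _, d in closed.degree())      # watertight

holed = nx.from_edgelist(tet_adjacency[:-1])        # drop adjacency (2, 3)
print([k for k, d in holed.degree() if d != 3])     # -> [2, 3]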
Example #4
def get_angle_connectivity(ibonds):
    """Given the bonds, get the indices of the atoms defining all the bond
    angles
    
    Parameters
    ----------
    ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int
        n_bonds x 2 array of indices, where each row gives the indices of the
        two atoms that participate in a bond.
    
    Returns
    -------
    iangles : np.ndarray, shape[n_angles, 3], dtype=int
        n_angles x 3 array of indices, where each row is the index of three
        atoms m,n,o such that n is bonded to both m and o.
    """

    graph = nx.from_edgelist(ibonds)
    n_atoms = graph.number_of_nodes()
    iangles = []

    for i in range(n_atoms):
        for (m, n) in combinations(graph.neighbors(i), 2):
            # there is a bond angle m-i-n
            iangles.append((m, i, n))

    return np.array(iangles)
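# Usage sketch on a hypothetical water-like molecule (numpy, networkx and
# itertools.combinations are assumed to be imported as in the original module):
# atom 0 is bonded to atoms 1 and 2, so the only bond angle is 1-0-2.
import numpy as np

water_bonds = np.array([[0, 1], [0, 2]])
print(get_angle_connectivity(water_bonds))  # -> [[1 0 2]]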
Example #5
def get_dihedral_connectivity(ibonds):
    """Given the bonds, get the indices of the atoms defining all the dihedral
    angles
    
    Parameters
    ----------
    ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int
        n_bonds x 2 array of indices, where each row gives the indices of the
        two atoms that participate in a bond.
    
    Returns
    -------
    idihedrals : np.ndarray, shape[n_dihedrals, 4], dtype=int
        All sets of 4 atoms A,B,C,D such that A is bonded to B, B is bonded
        to C, and C is bonded to D
    """
    graph = nx.from_edgelist(ibonds)
    n_atoms = graph.number_of_nodes()
    idihedrals = []
    
    # TODO: CHECK FOR DIHEDRAL ANGLES THAT ARE 180 and recover
    # conf : msmbuilder.Trajectory
    #    An msmbuilder trajectory, only the first frame will be used. This
    #    is used purely to make the check for angle(ABC) != 180.

    for a in range(n_atoms):
        for b in graph.neighbors(a):
            for c in filter(lambda c: c not in [a, b], graph.neighbors(b)):
                for d in filter(lambda d: d not in [a, b, c], graph.neighbors(c)):
                    idihedrals.append((a, b, c, d))

    return np.array(idihedrals)
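# Usage sketch on a hypothetical 4-atom chain 0-1-2-3 (e.g. a butane backbone):
# the single torsion is reported in both orientations, (0,1,2,3) and (3,2,1,0).
import numpy as np

chain_bonds = np.array([[0, 1], [1, 2], [2, 3]])
print(get_dihedral_connectivity(chain_bonds))
# -> [[0 1 2 3]
#     [3 2 1 0]]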
def angle_connectivity(ibonds):
    """Given the bonds, get the indices of the atoms defining all the bond
    angles

    A 'bond angle' is defined as any set of 3 atoms, `i`, `j`, `k` such that
    atom `i` is bonded to `j` and `j` is bonded to `k`

    Parameters
    ----------
    ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int
        Each row in `ibonds` is a pair of indices `i`, `j`, indicating that
        atoms `i` and `j` are bonded

    Returns
    -------
    iangles : np.ndarray, shape[n_angles, 3], dtype=int
        n_angles x 3 array of indices, where each row is the index of three
        atoms m,n,o such that n is bonded to both m and o.
    """

    graph = nx.from_edgelist(ibonds)
    iangles = []

    for i in graph.nodes():
        for (m, n) in combinations(graph.neighbors(i), 2):
            # there is a bond angle m-i-n
            iangles.append((m, i, n))

    return np.array(iangles)
def network_analysis(gene_list,network_file,outdir):
    outfn = "%s/output" % outdir
    f = open(outfn,'w')
    f.write("gene\tdegrees\tbtw_centrality\n")
    network = networkx.read_adjlist(network_file)
    print "Number of edges in input graph: %s" % network.number_of_edges()
    print "Number of nodes in input graph: %s" % network.number_of_nodes()
    subnetwork = network.subgraph(gene_list)
    print "Number of edges in subgraph: %s" % subnetwork.number_of_edges()
    print "Number of nodes in subgraph: %s" % subnetwork.number_of_nodes()
    bwt_central = networkx.betweenness_centrality(subnetwork)
    degrees = subnetwork.degree(gene_list)
    for gene in gene_list:
        # Number of degrees
        if gene in degrees:
            num_degrees = degrees[gene]
        else:
            num_degrees = "NA"
        # Betweenness centrality
        if gene in bwt_central:
            btw_gene = bwt_central[gene]
        else:
            btw_gene = "NA"
        # File with neighbor nodes
        if subnetwork.has_node(gene):
            neighbors = list(networkx.all_neighbors(subnetwork,gene))
            edges = [(str(gene), neighbor) for neighbor in neighbors]
            neighbor_networks = networkx.from_edgelist(edges)
            write_networks(neighbor_networks,gene,outdir)
        f.write("%s\t%s\t%s\n" % (gene,num_degrees,btw_gene))
    f.close()
Example #8
def fix_face_winding(mesh):
    '''
    Traverse and change mesh faces in-place to make sure winding is coherent,
    i.e. that shared edges on adjacent faces are traversed in opposite directions
    '''
    # we create the face adjacency graph: 
    # every node in g is an index of mesh.faces
    # every edge in g represents two faces which are connected
    graph_all = nx.from_edgelist(mesh.face_adjacency)
    flipped   = 0
    # we are going to traverse the graph using BFS, so we have to start
    # a traversal for every connected component
    for graph in nx.connected_component_subgraphs(graph_all):
        start = graph.nodes()[0]
        # we traverse every pair of faces in the graph
        # we modify mesh.faces and mesh.face_normals in place 
        for face_pair in nx.bfs_edges(graph, start):
            # for each pair of faces, we convert them into edges,
            # find the edge that both faces share, and then see if the edges
            # are reversed in order as you would expect in a well constructed mesh
            pair    = mesh.faces[[face_pair]]            
            edges   = faces_to_edges(pair)
            overlap = group_rows(np.sort(edges,axis=1), require_count=2)
            if len(overlap) == 0:
                # only happens on non-watertight meshes
                continue
            edge_pair = edges[[overlap[0]]]
            if edge_pair[0][0] == edge_pair[1][0]:
                # if the edges aren't reversed, invert the order of one of the faces
                flipped += 1
                mesh.faces[face_pair[1]] = mesh.faces[face_pair[1]][::-1]
    log.info('Flipped %d/%d edges', flipped, len(mesh.faces)*3)
Example #9
def split_nx(mesh, check_watertight=True, only_count=False):
    '''
    Given a mesh, will split it up into a list of meshes based on face connectivity
    If check_watertight is true, it will only return meshes where each face has
    exactly 3 adjacent faces, which is a simple metric for being watertight.
    '''
    def mesh_from_components(connected_faces):
        if check_watertight:
            subgraph   = nx.subgraph(face_adjacency, connected_faces)
            watertight = np.equal(list(subgraph.degree().values()), 3).all()
            if not watertight: return
        faces  = mesh.faces[[connected_faces]]
        unique = np.unique(faces.reshape(-1))
        replacement = dict()
        replacement.update(np.column_stack((unique, np.arange(len(unique)))))
        faces = replace_references(faces, replacement).reshape((-1,3))
        new_meshes.append(mesh.__class__(vertices     = mesh.vertices[[unique]],
                                         faces        = faces,
                                         face_normals = mesh.face_normals[[connected_faces]]))
    face_adjacency = nx.from_edgelist(mesh.face_adjacency())
    new_meshes     = deque()
    components     = list(nx.connected_components(face_adjacency))
    if only_count: return len(components)

    for component in components: mesh_from_components(component)
    log.info('split mesh into %i components.',
             len(new_meshes))
    return list(new_meshes)
Example #10
 def _process(self, element, key=None):
     if self.p.layout and isinstance(self.p.layout, FunctionType):
         import networkx as nx
         edges = element.array([0, 1])
         graph = nx.from_edgelist(edges)
         if 'weight' in self.p.kwargs:
             weight = self.p.kwargs['weight']
             for (s, t), w in zip(edges, element[weight]):
                 graph.edges[s, t][weight] = w
         positions = self.p.layout(graph, **self.p.kwargs)
         nodes = [tuple(pos)+(idx,) for idx, pos in sorted(positions.items())]
     else:
         source = element.dimension_values(0, expanded=False)
         target = element.dimension_values(1, expanded=False)
         nodes = np.unique(np.concatenate([source, target]))
         if self.p.layout:
             import pandas as pd
             df = pd.DataFrame({'index': nodes})
             nodes = self.p.layout(df, element.dframe(), **self.p.kwargs)
             nodes = nodes[['x', 'y', 'index']]
         else:
             nodes = circular_layout(nodes)
     nodes = element.node_type(nodes)
     if element._nodes:
         for d in element.nodes.vdims:
             vals = element.nodes.dimension_values(d)
             nodes = nodes.add_dimension(d, len(nodes.vdims), vals, vdim=True)
     if self.p.only_nodes:
         return nodes
     return element.clone((element.data, nodes))
Example #11
def fix_winding(mesh):
    """
    Traverse and change mesh faces in-place to make sure winding
    is correct, with edges on adjacent faces in
    opposite directions.

    Parameters
    -------------
    mesh: Trimesh object

    Alters
    -------------
    mesh.face: will reverse columns of certain faces
    """
    # anything we would fix is already done
    if mesh.is_winding_consistent:
        return

    graph_all = nx.from_edgelist(mesh.face_adjacency)
    flipped = 0

    faces = mesh.faces.view(np.ndarray).copy()

    # we are going to traverse the graph using BFS
    # start a traversal for every connected component
    for components in nx.connected_components(graph_all):
        # get a subgraph for this component
        g = graph_all.subgraph(components)
        # get the first node in the graph in a way that works on nx's
        # new API and their old API
        start = next(iter(g.nodes()))

        # we traverse every pair of faces in the graph
        # we modify mesh.faces and mesh.face_normals in place
        for face_pair in nx.bfs_edges(g, start):
            # for each pair of faces, we convert them into edges,
            # find the edge that both faces share and then see if edges
            # are reversed in order as you would expect
            # (2, ) int
            face_pair = np.ravel(face_pair)
            # (2, 3) int
            pair = faces[face_pair]
            # (6, 2) int
            edges = faces_to_edges(pair)
            overlap = group_rows(np.sort(edges, axis=1),
                                 require_count=2)
            if len(overlap) == 0:
                # only happens on non-watertight meshes
                continue
            edge_pair = edges[overlap[0]]
            if edge_pair[0][0] == edge_pair[1][0]:
                # if the edges aren't reversed, invert the order of one face
                flipped += 1
                faces[face_pair[1]] = faces[face_pair[1]][::-1]

    if flipped > 0:
        mesh.faces = faces

    log.debug('flipped %d/%d edges', flipped, len(mesh.faces) * 3)
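# A minimal illustration of the shared-edge test above, without trimesh
# helpers: two consistently wound triangles traverse their shared edge in
# opposite directions, while an inconsistently wound pair traverses it the
# same way (which is what triggers a flip).
def _face_edges(face):
    return [(face[0], face[1]), (face[1], face[2]), (face[2], face[0])]

consistent = [[0, 1, 2], [2, 1, 3]]    # shared edge seen as (1, 2) and (2, 1)
inconsistent = [[0, 1, 2], [1, 2, 3]]  # shared edge seen as (1, 2) twice
print((1, 2) in _face_edges(consistent[0]), (2, 1) in _face_edges(consistent[1]))
print((1, 2) in _face_edges(inconsistent[0]), (1, 2) in _face_edges(inconsistent[1]))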
Example #12
def load_from_edge_list(filename):
    edgelist = []
    with open(filename, "r") as fh:
        for line in fh.readlines():
            source, target = line.split(",")
            edgelist.append((int(source), int(target)))

    return nx.from_edgelist(edgelist)
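# Usage sketch: write a small comma-separated edge file to a temporary path
# (hypothetical data) and load it back as a graph.
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.csv', delete=False) as tmp:
    tmp.write("0,1\n1,2\n2,0\n")

g = load_from_edge_list(tmp.name)
print(g.number_of_nodes(), g.number_of_edges())  # -> 3 3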
 def __init__(self, name, group_type, size, hours, school, data):
     self.name = name
     self.group_type = group_type
     self.size = size
     self.hours = hours
     self.school = school
     self.data = data
     self.G = nx.from_edgelist(self.data)
Example #14
def fix_normals(mesh):
    '''
    Find and fix problems with mesh.face_normals and mesh.faces winding direction.
    
    For face normals ensure that vectors are consistently pointed outwards,
    and that mesh.faces is wound in the correct direction for all connected components.
    '''
    mesh.generate_face_normals()
    # we create the face adjacency graph: 
    # every node in g is an index of mesh.faces
    # every edge in g represents two faces which are connected
    graph = nx.from_edgelist(mesh.face_adjacency())
    
    # we are going to traverse the graph using BFS, so we have to start
    # a traversal for every connected component
    for connected in nx.connected_components(graph):
        # we traverse every pair of faces in the graph
        # we modify mesh.faces and mesh.face_normals in place 
        for face_pair in nx.bfs_edges(graph, connected[0]):
            # for each pair of faces, we convert them into edges,
            # find the edge that both faces share, and then see if the edges
            # are reversed in order as you would expect in a well constructed mesh
            pair      = mesh.faces[[face_pair]]
            edges     = faces_to_edges(pair, sort=False)
            overlap   = group_rows(np.sort(edges,axis=1), require_count=2)
            edge_pair = edges[[overlap[0]]]
            reversed  = edge_pair[0][0] != edge_pair[1][0]
            if reversed: continue
            # if the edges aren't reversed, invert the order of one of the faces
            # and negate its normal vector
            mesh.faces[face_pair[1]] = mesh.faces[face_pair[1]][::-1]
            mesh.face_normals[face_pair[1]] *= (reversed*2) - 1
            
        # the normals of every connected face now all pointed in 
        # the same direction, but there is no guarantee that they aren't all
        # pointed in the wrong direction
        faces           = mesh.faces[[connected]]
        faces_x         = np.min(mesh.vertices[:,0][[faces]], axis=1)
        left_order      = np.argsort(faces_x)
        left_values     = faces_x[left_order]
        left_candidates = np.abs(left_values - left_values[0]) < TOL_ZERO
        backwards       = None
        
        # note that we have to find a face which ISN'T perpendicular to the x axis 
        # thus we go through all the candidate faces that are at the extreme left
        # until we find one that has a nonzero dot product with the x axis
        for leftmost in left_order[left_candidates]:                
            face_dot = np.dot([-1.0,0,0], mesh.face_normals[leftmost]) 
            if abs(face_dot) > TOL_ZERO: 
                backwards = face_dot < 0.0
                break
        if backwards: mesh.face_normals[[connected]] *= -1.0
        
        winding_tri  = connected[0]
        winding_test = np.diff(mesh.vertices[[mesh.faces[winding_tri]]], axis=0)
        winding_dir  = np.dot(unitize(np.cross(*winding_test)), mesh.face_normals[winding_tri])
        if winding_dir < 0: mesh.faces[[connected]] = np.fliplr(mesh.faces[[connected]])
def drawGraph(inputFile, outputFile):
    listEdge = pd.read_csv(inputFile)
    arrayList = listEdge.iloc[:, [2, 3]].values
    G = nx.from_edgelist(arrayList)
    limits = plt.axis('off')

    nx.draw_networkx(G, with_labels=True, font_size=5, node_size=500)
    plt.savefig(outputFile)
Example #16
 def social_following(self):
     # FIXME: add proper site context to ploneintranet.network graph
     graph = queryUtility(INetworkGraph)
     result = []
     # FIXME: add proper API accessor to ploneintranet.network graph
     for user in graph._following["user"].keys():
         for following in graph.get_following("user", user):
             result.append((user, following))
     return nx.from_edgelist(result,
                             create_using=nx.DiGraph())
def cube_neighbor_graph(file_name):
  # Builds and returns the 3x3x3 cube Game of Life neighbor graph given a file
  # of line adjacencies.
  with open(file_name, 'r') as f:
    line, line_prev, edge_list = [], [], []
    for line in (list(map(int, raw.strip().split())) for raw in f):
      if line and line_prev:
        for edge in line_edges(line_prev, line): edge_list.append(edge)
      line_prev = line
  return nx.from_edgelist(edge_list, create_using=nx.Graph())
Example #18
    def calculate(self):
        """Lazy initialization."""

        # TODO: ensure that this is fully unrestricted / runs as admin

        catalog = api.portal.get_tool('portal_catalog')
        content_tree = []
        content_authors = []
        content_tags = []
        for brain in catalog():
            context = brain.getObject()
            context_path = 'path:%s' % '/'.join(context.getPhysicalPath())

            for child_id in context.objectIds():
                child_path = '%s/%s' % (context_path, child_id)
                # containment is a bidirectional relationship
                content_tree.append((context_path, child_path))
                content_tree.append((child_path, context_path))

            # TODO: add reference links
            # TODO: add text links

            for author in context.creators:
                # authorship is a bidirectional relationship
                content_authors.append((context_path, 'user:%s' % author))
                content_authors.append(('user:%s' % author, context_path))

            # TODO: add sharing

            for tag in context.Subject():
                # tagging is bidirectional between context and tag
                content_tags.append((context_path, 'tag:%s' % tag))
                content_tags.append(('tag:%s' % tag, context_path))

        self._cache['content_tree'] = nx.from_edgelist(
            content_tree,
            create_using=nx.DiGraph())
        self._cache['content_authors'] = nx.from_edgelist(
            content_authors,
            create_using=nx.DiGraph())
        self._cache['content_tags'] = nx.from_edgelist(
            content_tags,
            create_using=nx.DiGraph())
def construct_constrained_graph(adj, r, n):
    """
    given an adjacency matrix adj in the form of a condensed distance matrix
    (of the kind returned by pdist) for n observations, returns the similarity
    graph for all distances less than or equal to r.
    """
    ij = row_col_from_condensed_index(n, np.where(adj<=r)[0])
    g = nx.from_edgelist(zip(*ij))
    g.add_nodes_from(range(n))
    return g
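# The helper row_col_from_condensed_index is not shown here; one possible
# (hypothetical) implementation uses the fact that scipy's pdist orders pairs
# exactly like np.triu_indices(n, k=1). A small end-to-end sketch:
import numpy as np
from scipy.spatial.distance import pdist

def row_col_from_condensed_index(n, indices):
    rows, cols = np.triu_indices(n, k=1)
    return rows[indices], cols[indices]

points = np.array([[0.0, 0.0], [0.0, 1.0], [5.0, 5.0]])
g = construct_constrained_graph(pdist(points), r=1.5, n=len(points))
print(g.number_of_edges(), g.number_of_nodes())  # -> 1 3 (edge 0-1, isolated node 2)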
Example #20
 def facets_nx():
     graph_parallel = nx.from_edgelist(face_idx[parallel])
     facets_idx = np.array([list(i) for i in nx.connected_components(graph_parallel)])
     #### commented by weiwei
     # should also return the single triangles
     facets_idx_extra = copy.deepcopy(facets_idx.tolist())
     for item in range(mesh.faces.shape[0]):
         if item not in [i for subitem in facets_idx.tolist() for i in subitem]:
             facets_idx_extra.append([item])
     return np.array(facets_idx_extra)
Example #21
def create_inverse_degree_matrix(edges):
    """
    Creating an inverse degree matrix from an edge list.
    :param edges: Edge list.
    :return D_1: Inverse degree matrix.
    """
    graph = nx.from_edgelist(edges)
    ind = range(len(graph.nodes()))
    degs = [1.0/graph.degree(node) for node in range(graph.number_of_nodes())]
    D_1 = sparse.coo_matrix((degs,(ind,ind)),shape=(graph.number_of_nodes(), graph.number_of_nodes()),dtype=np.float32)
    return D_1
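# Usage sketch on a hypothetical path graph 0-1-2 (networkx, numpy and
# scipy.sparse are assumed to be imported as in the original module): node 1
# has degree 2 and nodes 0 and 2 have degree 1, so the diagonal is 1, 0.5, 1.
D_1 = create_inverse_degree_matrix([(0, 1), (1, 2)])
print(D_1.toarray().diagonal())  # -> [1.  0.5 1. ]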
 def __init__(self, args):
     """
     Initialization method which reads the arguments.
     :param args: Arguments object.
     """
     self.args = args
     self.graph = nx.from_edgelist(
         pd.read_csv(args.input_path).values.tolist())
     self.shape = (len(self.graph.nodes()), len(self.graph.nodes()))
     self.do_walks()
     self.do_processing()
Example #23
 def split_nx():
     adjacency_graph = nx.from_edgelist(adjacency)
     # make sure every face has a node, so single triangles
     # aren't discarded (as they aren't adjacent to anything)
     if not only_watertight:
         # if we are allowing non- watertight result add nodes for every
         # face to make sure single, disconnected triangles are in the graph
         adjacency_graph.add_nodes_from(np.arange(len(mesh.faces)))
     components = nx.connected_components(adjacency_graph)
     result = mesh.submesh(components, only_watertight=only_watertight)
     return result
Example #24
def test_nx_qubit_layout_2():
    g = nx.from_edgelist(
        [
            (cirq.LineQubit(0), cirq.LineQubit(1)),
            (cirq.LineQubit(1), cirq.LineQubit(2)),
        ]
    )
    pos = ccr.nx_qubit_layout(g)
    for k, (x, y) in pos.items():
        assert x == k.x
        assert y == 0.5
def read_edgelist(infile, sep=' ', header=False):
    if header:
        lines = open(infile, 'r').readlines()[1:]
    else:
        lines = open(infile, 'r').readlines()
    edges = []
    for line in lines:
        a, b = line.rstrip().split(sep)
        edges.append((a, b))
    g = nx.from_edgelist(edges)
    return (g)
Example #26
def next_sibling_shortest_paths(tree_edges):
    sibling_edges = next_sibling_edges(tree_edges).numpy()
    G_siblings = nx.from_edgelist(sibling_edges, create_using=nx.DiGraph)
    sps = list(nx.all_pairs_dijkstra_path_length(G_siblings))
    sibling_sp_edgelist = torch.tensor([(from_node, to_node, dist)
                                        for from_node, dct in sps
                                        for to_node, dist in dct.items()],
                                       dtype=torch.long)
    sibling_sp_edgelist = sibling_sp_edgelist[sibling_sp_edgelist[:, 2] > 0]

    return sibling_sp_edgelist
Example #27
def load_network(network_file=network_path, edgelist=False):
    with open(network_file, 'r') as fl:
        rd = reader(fl)
        data = []

        for row in rd:
            data.append(row)

    if edgelist:
        return from_edgelist(data)
    return from_numpy_array(array(data))
Example #28
def global_sparsification_by_feature_sim(edge_list,
                                         feature,
                                         del_ratio=0.5,
                                         add_ratio=0.5,
                                         sim_func='cos'):
    def similarity_func(x1, x2):
        if sim_func == 'cos':
            return np.dot(x1, x2) / (norm(x1) * norm(x2))
        elif sim_func == 'euc':
            return 1 - norm(x1 - x2)
        elif sim_func == 'rand':
            return np.random.rand()
        return 1

    # construct networkx graph
    g = nx.from_edgelist(edge_list, create_using=nx.Graph)
    del_edge_cnt = min(int(del_ratio * len(edge_list)), len(edge_list))
    add_edge_cnt = int(add_ratio * len(edge_list))

    # remove edges
    if del_ratio > 0:
        results = []
        for u, v in edge_list:
            p = similarity_func(feature[u], feature[v])
            results.append((p, u, v))
        results.sort()
        edge_list_cut = np.array(results[del_edge_cnt:], dtype=int)[:, 1:3]
    else:
        edge_list_cut = edge_list

    # add edges
    if add_ratio > 0:
        candidate_links = set()
        for node_i in g.nodes:
            for node_j in nx.neighbors(g, node_i):
                for node_k in nx.neighbors(g, node_j):
                    if node_i < node_k and not g.has_edge(node_i, node_k):
                        candidate_links.add((node_i, node_k))

        results = []
        for u, v in list(candidate_links):
            if u != v:
                p = similarity_func(feature[u], feature[v])
                results.append((p, u, v))
        results.sort(reverse=True)
        add_edge_list = np.array(results[:add_edge_cnt], dtype=int)[:, 1:3]

    if del_ratio >= 1:
        result_edges = add_edge_list
    elif add_ratio == 0:
        result_edges = edge_list_cut
    else:
        result_edges = np.concatenate((edge_list_cut, add_edge_list), axis=0)
    return result_edges
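# Usage sketch with hypothetical data: a 4-node cycle with random features,
# dropping half of the original edges and adding up to the same number of
# 2-hop candidates (norm above is assumed to be numpy.linalg.norm).
import numpy as np

edge_list = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
feature = np.random.rand(4, 16)
new_edges = global_sparsification_by_feature_sim(edge_list, feature,
                                                 del_ratio=0.5, add_ratio=0.5)
print(new_edges)  # -> 4 rows: the 2 surviving edges plus the 2 added chords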
def construct_sparse_graph_networkx(graph_file_name):
    #In this task, we will compare our method with NetworkX,
    #   one of the state-of-the-art graph analytics platforms.
    # NetworkX has a from_edgelist function that accepts an array of edges
    #   and constructs a graph from it.
    #Read the input file and populate the variable edge_list
    #####################Task t3d: your code below#######################
    edge_list = None
    #####################Task t3d: your code below#######################
    G = nx.from_edgelist(edge_list, create_using=nx.DiGraph())
    return G 
def load_raw_data(edgefile):
    """Edge list file to sparse matrix"""
    edgearray = np.loadtxt(edgefile)
    if edgearray.shape[1] == 3:
        edgearray = edgearray[:, :2]
    edgearray = edgearray.astype(int)

    g = nx.from_edgelist(edgearray)
    spmatrix = nx.to_scipy_sparse_matrix(g, format='csc')

    return spmatrix
Example #31
 def test_pagerank_tags_unweighted(self):
     """A minimal PR test"""
     G = nx.from_edgelist(config.CONTENT_TAGS)
     PR = nx.pagerank(G)
     seq = self._sorted_pagerank(PR)
     self.assertEqual(seq,
                      [('path:/plone/public', 0.25),
                       ('path:/plone/public/d1', 0.25),
                       ('tag:foo', 0.24),
                       ('tag:bar', 0.13),
                       ('tag:nix', 0.13)])
Example #33
def test_compiler_isa_to_graph(compiler_isa: CompilerISA):
    """
    Test that compiler_isa_to_graph transforms a ``CompilerISA`` to an ``nx.Graph``
    accurately and that an ``NxQuantumProcessor.qubit_topology`` is isomorphic to the
    raw ``nx.Graph``.
    """
    graph = compiler_isa_to_graph(compiler_isa)
    should_be = nx.from_edgelist([(0, 1), (1, 2), (0, 2), (0, 3)])
    assert nx.is_isomorphic(graph, should_be)

    nx_quantum_processor = NxQuantumProcessor(graph)
    assert nx.is_isomorphic(graph, nx_quantum_processor.qubit_topology())
Example #34
def create_dummy():
  edgelist = [
    (0, 1),
    (0, 2),
    (0, 3),
    (0, 4),
    (3, 6),
    (2, 4),
    (4, 5)
  ]

  return nx.from_edgelist(edgelist)
Example #35
def graph_reader(fpath):
    """
    Function to read a csv edge list and transform it to a networkx graph object.
    """
    edges = pd.read_csv(fpath)
    graph = nx.convert_node_labels_to_integers(
        nx.from_edgelist(edges.values.tolist()))
    assert list(graph.nodes())[0] == 0
    print('number of nodes', graph.number_of_nodes())
    print('number of edges', graph.number_of_edges())
    adj = nx.adjacency_matrix(graph)
    return graph, adj, None
def normalized_cen(df):

    g = nx.Graph(df)
    edge_g = nx.to_edgelist(g)
    GN = nx.from_edgelist(edge_g)
    #GN.remove_edges_from(nx.selfloop_edges(GN))

    degree_centrality = GN.degree(weight='weight')
    degree_access = dict(GN.degree(weight='weight'))
    norm_degree_access = normalize(degree_access)

    return norm_degree_access
Example #37
def parse_gene_graph(fname):
    with open(fname) as f:
        lines = f.readlines()
    header = lines[0]
    split_lines = [
        line.strip("\n").split("\t") for line in lines[1:len(lines)]
    ]
    edge_list = [(sl[0], sl[1], {
        'direction': sl[3],
        'score': float(sl[4])
    }) for sl in split_lines if float(sl[4]) == 1.0]
    return nx.from_edgelist(edge_list)
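# nx.from_edgelist also accepts (u, v, attr_dict) triples, which is what the
# parser above relies on; a tiny hypothetical example:
import networkx as nx

g = nx.from_edgelist([("BRCA1", "TP53", {"direction": "+", "score": 1.0})])
print(g["BRCA1"]["TP53"]["score"])  # -> 1.0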
Example #38
def main(argv):
    """
    Entry point.
    """
    
    if len(argv) != 1:
        print "Usage: python graphml2gml.py <infile>"
        sys.exit(0)
    infile = argv[0]
    g = networkx.read_graphml(infile)
    networkx.write_gml(networkx.from_edgelist(g.edges()), 
                       infile.replace(".graphml", ".gml"))
    def test_draw_graph_directed_no_edge_labels(self):
        """
		Test that when drawing a directed graph with no edge labels, no legend is created.
		"""

        E = [('A', 'A')]
        G = nx.from_edgelist(E, create_using=nx.DiGraph)

        viz = drawable.Drawable(plt.figure(figsize=(10, 5)))
        nodes, node_names, edges, edge_names = viz.draw_graph(G)
        self.assertEqual(1, len(edges))
        self.assertFalse(viz.legend.lines[0])
Example #40
 def feature_extractor(data, rounds, name):
     graph = nx.from_edgelist(np.array(data.edge_index.T.cpu(), dtype=int))
     if data.x is not None:
         feature = {
             int(key): str(val)
             for key, val in enumerate(np.array(data.x.cpu()))
         }
     else:
         feature = dict(nx.degree(graph))
     graph_wl_features = Graph2Vec.wl_iterations(graph, feature, rounds)
     doc = TaggedDocument(words=graph_wl_features, tags=["g_" + name])
     return doc
Example #41
def broken_faces(mesh, color=None):
    '''
    Return the index of faces in the mesh which break the watertight status
    of the mesh. If color is set, change the color of the broken faces. 
    '''
    adjacency = nx.from_edgelist(mesh.face_adjacency)
    broken = [k for k, v in dict(adjacency.degree()).items() if v != 3]
    broken = np.array(broken)
    if color is not None:
        if not is_sequence(color): color = [255, 0, 0]
        mesh.visual.face_colors[broken] = color
    return broken
Example #42
def load_graph(graph_path):
    """
    Reading a NetworkX graph.
    :param graph_path: Path to the edge list.
    :return graph: NetworkX object.
    """
    data = pd.read_csv(graph_path)
    edges = data.values.tolist()
    edges = [[int(edge[0]), int(edge[1])] for edge in edges]
    graph = nx.from_edgelist(edges)
    graph.remove_edges_from(nx.selfloop_edges(graph))
    return graph
Example #43
def test_networkx_backend_induction():

    backend = NetworkXBackEnd()
    graph = nx.from_edgelist([[0, 1], [1, 2], [2, 3], [2, 4], [2, 5]])

    subgraph = backend.get_subgraph(graph, [2, 3, 4])

    for node in backend.get_node_iterator(subgraph):
        assert node in [2, 3, 4]

    for edge in backend.get_edge_iterator(subgraph):
        assert edge in [(2, 3), (2, 4)]
def test_device_stuff():
    topo = nx.from_edgelist([(0, 4), (0, 99)])
    qc = QuantumComputer(
        name='testy!',
        qam=None,  # not necessary for this test
        device=NxDevice(topo),
        compiler=DummyCompiler())
    assert nx.is_isomorphic(qc.qubit_topology(), topo)

    isa = qc.get_isa(twoq_type='CPHASE')
    assert sorted(isa.edges)[0].type == 'CPHASE'
    assert sorted(isa.edges)[0].targets == [0, 4]
Example #45
def broken_faces(mesh, color=None):
    '''
    Return the index of faces in the mesh which break the watertight status
    of the mesh. If color is set, change the color of the broken faces. 
    '''
    adjacency = nx.from_edgelist(mesh.face_adjacency)
    broken    = [k for k, v in dict(adjacency.degree()).items() if v != 3]
    broken    = np.array(broken)
    if color is not None:
        if not is_sequence(color): color = [255,0,0]
        mesh.visual.face_colors[broken] = color
    return broken
Example #46
File: dgk.py Project: zrt/cogdl
 def feature_extractor(data, rounds, name):
     graph = nx.from_edgelist(np.array(data.edge_index.T.cpu(), dtype=int))
     if data.x is not None:
         feature = {
             int(key): str(val.argmax(axis=0))
             for key, val in enumerate(np.array(data.x.cpu()))
         }
     else:
         feature = dict(nx.degree(graph))
     graph_wl_features = DeepGraphKernel.wl_iterations(
         graph, feature, rounds)
     return graph_wl_features
Example #47
    def to_networkx(self):
        """
        Return a `networkx` copy of this graph.

        Returns
        ----------
        graph : networkx.DiGraph
          Directed graph.
        """
        import networkx
        return networkx.from_edgelist(self.to_edgelist(),
                                      create_using=networkx.DiGraph)
Example #48
    def calculate(self):
        """Lazy initialization."""

        # TODO: ensure that this is fully unrestricted / runs as admin

        catalog = api.portal.get_tool('portal_catalog')
        content_tree = []
        content_authors = []
        content_tags = []
        for brain in catalog():
            context = brain.getObject()
            context_path = 'path:%s' % '/'.join(context.getPhysicalPath())

            for child_id in context.objectIds():
                child_path = '%s/%s' % (context_path, child_id)
                # containment is a bidirectional relationship
                content_tree.append((context_path, child_path))
                content_tree.append((child_path, context_path))

            # TODO: add reference links
            # TODO: add text links

            for author in context.Creators():
                # authorship is a bidirectional relationship
                content_authors.append((context_path, 'user:%s' % author))
                content_authors.append(('user:%s' % author, context_path))

            # TODO: add sharing

            for tag in context.Subject():
                # tagging is bidirectional between context and tag
                content_tags.append((context_path, 'tag:%s' % tag))
                content_tags.append(('tag:%s' % tag, context_path))

        self._cache['content_tree'] = nx.from_edgelist(
            content_tree, create_using=nx.DiGraph())
        self._cache['content_authors'] = nx.from_edgelist(
            content_authors, create_using=nx.DiGraph())
        self._cache['content_tags'] = nx.from_edgelist(
            content_tags, create_using=nx.DiGraph())
Example #49
def compute_spanning_tree(G):
    # The Spanning Tree of G
    def build_graph(vertices, edges):
        graph = {}

        for vertex in vertices:

            graph[vertex] = []

            for edge in edges:
                if vertex in edge:
                    graph[vertex].append(edge)

        return graph

    edges = G.edges()
    vertices = G.nodes()

    graph = build_graph(vertices, edges)

    costs = dict()
    edges = dict()
    for node in graph.keys():
        costs[node] = float('inf')
        edges[node] = None

    unvisited_vertices = {k: v for k, v in costs.items()}

    root = next(iter(graph))

    costs[root] = 0
    del edges[root]

    while unvisited_vertices:
        current_node = min(unvisited_vertices.keys(),
                           key=(lambda k: unvisited_vertices[k]))
        del unvisited_vertices[current_node]

        cost = costs[current_node]
        for edge in graph[current_node]:

            node = [n for n in edge if n != current_node][0]

            new_cost = cost + 1
            if (node in unvisited_vertices and new_cost < costs[node]):
                costs[node] = new_cost
                unvisited_vertices[node] = new_cost
                edges[node] = (current_node, node)

    ST = nx.from_edgelist(edges.values())

    return ST
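# Usage sketch: on a 4-node cycle the spanning tree keeps exactly
# n - 1 = 3 of the 4 edges and is a tree.
G = nx.cycle_graph(4)
ST = compute_spanning_tree(G)
print(ST.number_of_edges(), nx.is_tree(ST))  # -> 3 True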
    def evaluate(self):
        coords = self.get_target('main')
        if self.eval_hook:
            self.eval_hook(self)
        if self.args.lambda_anchor == 0:  # anchor = centre
            coords.W.array[:, 1:(self.args.dim +
                                 1)] = coords.W.array[:, (self.args.dim + 1):]
        if (self.args.gpu > -1):
            dat = coords.xp.asnumpy(coords.W.data).copy()
        else:
            dat = coords.W.data.copy()

        np.savetxt(os.path.join(self.args.outdir,
                                "coords{:0>4}.csv".format(self.count)),
                   dat,
                   fmt='%1.5f',
                   delimiter=",")
        redge = reconstruct(dat, dag=self.args.dag)
        if self.args.reconstruct:
            np.savetxt(os.path.join(
                self.args.outdir,
                "reconstructed{:0>4}.csv".format(self.count)),
                       redge,
                       fmt='%i',
                       delimiter=",")
        if self.args.plot:
            plot_disks(
                dat,
                os.path.join(self.args.outdir,
                             "plot{:0>4}.png".format(self.count)))
        self.count += 1

        # loss eval
        if self.args.validation:
            f1, precision, recall, accuracy = compare_graph(self.graph,
                                                            nx.from_edgelist(
                                                                redge,
                                                                nx.DiGraph()),
                                                            output=False)
            if self.args.lambda_anchor > 0:
                anchor_violation, num_vert = check_anchor_containment(dat)
                anchor_ratio = anchor_violation / num_vert
            else:
                anchor_ratio = 0
            return {
                "myval/rec": recall,
                "myval/f1": f1,
                "myval/prc": precision,
                "myval/anc": anchor_ratio
            }
        else:
            return {"myval/none": 0}
Example #51
def random_rewired_fast(G, debug=False):
    def order_edge(ed):
        if ed[0]>ed[1]:
            return (ed[1],ed[0])
        else:
            return ed

    ne = G.number_of_edges()
    edge2ind = {}
    ind2edge = {}
    count=0
    for edge in G.edges():
        edge2ind[edge] = count
        ind2edge[count] = edge
        count = count +1
    for i in range(10*ne):
        n1 = random.randint(0,ne-1)
        n2 = random.randint(0,ne-1)
        if n1==n2:
            continue
        e1 = ind2edge[n1]
        e2 = ind2edge[n2]
        if random.random()<.5:
            en1 = order_edge((e1[0],e2[1]))
            en2 = order_edge((e1[1],e2[0]))
        else:
            en1 = order_edge((e1[0], e2[0]))
            en2 = order_edge((e1[1], e2[1]))
        # check if new edges already exist
        if (en1 not in edge2ind) and (en2 not in edge2ind):
            # check whether we are putting in self loops
            if (en1[0]!=en1[1])and(en2[0]!=en2[1]):
                # put the new edges in and remove the old edges
                edge2ind.pop(e1)
                edge2ind.pop(e2)
                edge2ind[en1] = n1
                edge2ind[en2] = n2
            
                ind2edge[n1] = en1
                ind2edge[n2] = en2
            else:
                if debug:
                    print "had to skip to avoid putting self edges"
        else:
            # we had to skip the move
            if debug:
                print "had to skip move because new edges already exist"
        if debug:
            if (i%100)==0:
                print "rewired %i times" %i
    Grewired = nx.from_edgelist(edge2ind.keys())
    return Grewired
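# Usage sketch: each accepted move swaps endpoints between two randomly chosen
# edges, so the per-node degrees of the rewired graph match the original.
G = nx.barabasi_albert_graph(50, 3, seed=0)
Gr = random_rewired_fast(G)
assert dict(G.degree()) == dict(Gr.degree())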
Example #52
def run_quilc_pass(circ: Circuit, backend: str):
    try:
        RebaseQuil().apply(circ)
        p_circ = tk_to_pyquil(circ)
        if backend == _BACKEND_IBM:
            devgraph = nx.from_edgelist(ibm_coupling)
            twoq_type = ['CZ']
            twoq_set = {OpType.CZ}
        elif backend == _BACKEND_RIGETTI:
            devgraph = nx.from_edgelist(rigetti_coupling)
            twoq_type = ['CZ', 'XY']
            twoq_set = {OpType.CZ, OpType.ISWAP}
        elif backend == _BACKEND_GOOGLE:
            devgraph = nx.from_edgelist(google_coupling)
            twoq_type = ['CZ']
            twoq_set = {OpType.CZ}
        elif backend == _BACKEND_FULL:
            devgraph = nx.complete_graph(circ.n_qubits)
            twoq_type = ['CZ', 'XY']
            twoq_set = {OpType.CZ, OpType.ISWAP}
        isa = isa_from_graph(devgraph, twoq_type=twoq_type)
        device = Device_("dev", {"isa": isa.to_dict()})
        device._isa = isa
        qcompiler = QVMCompiler(PyquilConfig().quilc_url, device, timeout=600)
        start_time = time.time()
        compiled_pr = qcompiler.quil_to_native_quil(p_circ)
        time_elapsed = time.time() - start_time
        print(time_elapsed)
        circ2 = pyquil_to_tk(compiled_pr)
        return [
            circ2.n_gates,
            circ2.depth(),
            sum(circ2.n_gates_of_type(op) for op in twoq_set),
            circ2.depth_by_type(twoq_set), time_elapsed
        ]
    except Exception as e:
        print(e)
        print("quilc error")
        return [nan, nan, nan, nan, nan]
Example #53
def fetch_graph(file):
  print "fetching graph"
  edgelist = []

  f = open(file, "r")

  for line in f:
    edge = tuple([int(i) for i in line.split()])
    edgelist.append(edge)

  f.close()
  print "finished fetching graph " + file + "\n"
  return nx.from_edgelist(edgelist)
Example #54
def read_graph(settings):
    """
    Reading the edge list from the path and returning the networkx graph object.
    :param settings: Settings object holding the input path and format flags.
    :return graph: Graph from edge list.
    """
    if settings.edgelist_input:
        graph = nx.read_edgelist(settings.input)
    else:
        edge_list = pd.read_csv(settings.input).values.tolist()
        graph = nx.from_edgelist(edge_list)
        graph.remove_edges_from(nx.selfloop_edges(graph))
    return graph
Example #55
def __group_to_color(segments, samples=None, snp_range=None):
    '''Given a -list- of key-value pairs ((start, stop), (v1,v2)), where the key denotes
    a segment [start,stop) of the infinite integer lattice on which v1 and v2 are equal,
    Return a list of lists, each of which contains segments of the same color (IBD sharing).'''
    (sub_segments, intersections, value_to_segments, _, _) = \
    im.segment.form_intersections(segments, True, samples=samples, snp_range=snp_range)
    # Build a graph G where the nodes = segments and edges = (segments intersect AND their sample
    # sets intersect). G's connected components are groups, where group is a set of segments of the
    # same color.
    return nx.connected_components(nx.from_edgelist(it.chain.from_iterable(it.product(sharing_segments, sharing_segments)
              for sharing_segments in (util.union_all(*(value_to_segments[i][x] for x in component))
                                       for i in range(len(sub_segments))
                                       for component in nx.connected_components(nx.Graph(intersections[i]))))))
Example #56
def facets_group(mesh):
    '''
    Find facets by grouping normals then getting the adjacency subgraph.
    The other two methods for finding facets rely on looking at the angle between
    adjacent faces, and then if they are below TOL_ZERO, adding them to a graph
    of parallel faces. This method should be somewhat more robust.
    '''
    adjacency = nx.from_edgelist(mesh.face_adjacency())
    facets    = deque()
    for row_group in group_rows(mesh.face_normals):
        if len(row_group) < 2: continue
        facets.extend([i for i in nx.connected_components(adjacency.subgraph(row_group)) if len(i) > 1])
    return np.array(facets)
Example #57
def all_pairs_shortest_paths(edges=None, G=None, directed=False, cutoff=None):
    assert edges is None or G is None
    if G is None:
        if directed:
            create_using = nx.DiGraph
        else:
            create_using = nx.Graph
        G = nx.from_edgelist(edges, create_using=create_using)
    sps = nx.all_pairs_dijkstra_path_length(G, cutoff=cutoff)
    values = torch.tensor([(dct[0], key, value) for dct in sps
                           for key, value in dct[1].items()],
                          dtype=torch.long)
    return values
Example #58
def read_graph(graph_path):
    """
    Method to read graph and create a target matrix with pooled adjacency matrix powers up to the order.
    :param graph_path: Path to the edge list.
    :return graph: graph.
    """
    print("\nTarget matrix creation started.\n")
    try:
        graph = nx.from_edgelist(pd.read_csv(graph_path).values.tolist())
    except:
        graph = nx.from_edgelist(pd.read_csv(graph_path, sep='\t').values.tolist())

    graph.remove_edges_from(nx.selfloop_edges(graph))

    # -- plot
    # import matplotlib.pyplot as plt
    # nx.draw(graph)
    # plt.show()
    # print("done")
    # exit()

    return graph
Example #59
def facets_nx(mesh):
    '''
    Returns lists of facets of a mesh. 
    Facets are defined as groups of faces which are both adjacent and parallel
    
    Facets returned reference indices in mesh.faces.
    '''
    face_idx       = mesh.face_adjacency()
    normal_pairs   = mesh.face_normals[[face_idx]]
    parallel       = np.abs(np.sum(normal_pairs[:,0,:] * normal_pairs[:,1,:], axis=1) - 1) < TOL_PLANAR
    graph_parallel = nx.from_edgelist(face_idx[parallel])
    facets         = list(nx.connected_components(graph_parallel))
    return facets