Example #1
 def generate_full_random_weighted_network(self, nnodes, nlayers):
     # random node names and random sequential layer numbers
     M = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
     letters = [
         'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
         'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'
     ]
     # start from layer 9 to check that there are no indexing issues with 9->10
     l0 = 9
     # have one node name constant over all layers
     c = 'fixed'
     for l in range(l0, l0 + nlayers):
         M.add_layer(l)
         nodes_l = [c] + [
             ''.join(np.random.choice(letters, 5))
             for i in range(nnodes - 1)
         ]
         for i1 in range(len(nodes_l)):
             for i2 in range(i1 + 1, len(nodes_l)):
                 M[nodes_l[i1], l][nodes_l[i2],
                                   l] = 1.0 - np.random.random()
         if l != l0:
             for n1 in nodes_l:
                 for n2 in M.iter_nodes(l - 1):
                     M[n1, l][n2, l - 1] = 1.0 - np.random.random()
     return M
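A note on the weight expression above: np.random.random() returns values in [0, 1), so 1.0 - np.random.random() yields weights in (0, 1]. This avoids generating an exact zero, which pymnet would treat as a missing edge (noEdge defaults to 0). A minimal standalone sketch of the same pattern, assuming numpy and pymnet are importable under their usual aliases:

import numpy as np
import pymnet as pn

M = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
M.add_layer(0)
M.add_layer(1)
# Intra-layer edge with a weight drawn from (0, 1].
M['a', 0]['b', 0] = 1.0 - np.random.random()
# Inter-layer edge between node-layers on consecutive layers.
M['b', 0]['b', 1] = 1.0 - np.random.random()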
Example #2
def ER_from_net(M):
    net_layers = list(M.iter_layers())
    M_null = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    for layer in net_layers:
        M_null.add_layer(layer)
    for nl in M.iter_node_layers():
        M_null.add_node(nl[0], nl[1])
    edges_on_layers = dict()
    edges_between_layers = dict()
    for e in M.edges:
        if e[2] == e[3]:
            edges_on_layers[e[2]] = edges_on_layers.get(e[2], 0) + 1
        else:
            sorted_edge = tuple(sorted([e[2], e[3]]))
            edges_between_layers[sorted_edge] = edges_between_layers.get(
                sorted_edge, 0) + 1
    for layer in M_null.iter_layers():
        possible_edges = list(
            itertools.combinations(M_null.iter_nodes(layer), 2))
        for index in np.random.choice(len(possible_edges),
                                      size=edges_on_layers.get(layer, 0),
                                      replace=False):
            M_null[possible_edges[index][0], possible_edges[index][1],
                   layer] = 1
    for layerpair in edges_between_layers:
        possible_edges = list(
            itertools.product(M_null.iter_nodes(layerpair[0]),
                              M_null.iter_nodes(layerpair[1])))
        for index in np.random.choice(len(possible_edges),
                                      size=edges_between_layers[layerpair],
                                      replace=False):
            M_null[possible_edges[index][0], possible_edges[index][1],
                   layerpair[0], layerpair[1]] = 1
    return M_null
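ER_from_net preserves the exact number of intra-layer edges per layer and inter-layer edges per layer pair while rerandomizing their endpoints, so a quick sanity check is that the total edge count is unchanged. A sketch, assuming a network M such as the one built in the test class of Example #8:

M_null = ER_from_net(M)
# Node-layers and layers are preserved and edge placement is randomized,
# but the total number of edges must match the original exactly.
assert len(list(M_null.edges)) == len(list(M.edges))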
Example #3
def er_multilayer_partially_interconnected(nodes_by_layer, p, seed=None):
    """Create a one-aspect E-R multilayer network with given nodesets for each
    layer and edge probability p.
    
    Parameters
    ----------
    nodes_by_layer : sequence/iterator of sequences/iterators
        A sequence where each element is a sequence of nodes on a layer.
    p : float 0 <= p <= 1
        The probability that an edge exists between a node-layer pair.
    seed : int, str, bytes or bytearray
        Seed for network generation.
        
    Returns
    -------
    The generated network.
    """
    if seed is None:
        random.seed()
    else:
        random.seed(seed)
    network = pymnet.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    for layer, nodelist in enumerate(nodes_by_layer):
        network.add_layer(layer)
        for node in nodelist:
            network.add_node(node=node, layer=layer)
    numberings = dict()
    for index, nodelayer in enumerate(network.iter_node_layers()):
        numberings[nodelayer] = index
    for nodelayer1 in numberings:
        for nodelayer2 in numberings:
            if numberings[nodelayer1] > numberings[
                    nodelayer2] and random.random() < p:
                network[nodelayer1][nodelayer2] = 1
    return network
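A hedged usage sketch: two layers with partially overlapping node sets, edge probability 0.5, and a fixed seed for reproducibility (assumes the function above and its imports are in scope):

nodes_by_layer = [['a', 'b', 'c'], ['b', 'c', 'd']]
net = er_multilayer_partially_interconnected(nodes_by_layer, 0.5, seed=42)
# Six node-layers give 15 possible links, each present with probability 0.5.
print(len(list(net.edges)))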
Example #4
def read_weighted_network(filename):
    # Python's file iteration has no explicit EOF marker, so we track the current
    # section with flags while iterating with 'for line in f'. readline() returns
    # the empty string '' at EOF, but if the file contains empty lines, a stripped
    # empty line is indistinguishable from EOF, hence the flags.
    # The 'edges:' section is expected to be the last one in the file.
    # Layer names are assumed to be integers (ordinally coupled multiplex).
    nodeflag = 0
    layerflag = 0
    edgeflag = 0
    with open(filename,'r') as f:
        # Read first line which should contain the type of the network (multilayer/multiplex)
        nettype = f.readline()
        if nettype[0] != '#':
            raise Exception('The first line should start with # and contain network type')
        nettype = nettype.strip(' #\n').lower()
        if nettype == 'multiplex':
            M = pn.MultiplexNetwork(couplings='ordinal',fullyInterconnected=True)
        else:
            # If network is not multiplex, we use the general multilayer class
            M = pn.MultilayerNetwork(aspects=1,fullyInterconnected=False)
        
        if nettype == 'multiplex':
            for line in f:
                line = line.strip()
                if line == '' or line[0] == '#':
                    pass
                else:
                    if nodeflag == 1:
                        for node in line.split(';'):
                            M.add_node(node)
                        nodeflag = 0
                    if layerflag == 1:
                        for layer in line.split(';'):
                            M.add_layer(int(layer),1)
                        layerflag = 0
                    if edgeflag == 1:
                        edge = line.split(';')
                        if edge[2] == edge[3]:
                            M[edge[0],int(edge[2])][edge[1],int(edge[3])] = float(edge[4])
                        elif edge[0] == edge[1]:
                            pass
                        else:
                            raise Exception('Illegal inter-layer edges')
                    if line == 'nodes:':
                        nodeflag = 1
                    if line == 'layers:':
                        layerflag = 1
                    if line == 'edges:':
                        edgeflag = 1
        else:
            # If the network is a general multilayer one, we just need to set all the edges
            for line in f:
                line = line.strip()
                if edgeflag == 1 and line != '' and line[0] != '#':
                    edge = line.split(';')
                    M[edge[0],int(edge[2])][edge[1],int(edge[3])] = float(edge[4])
                if line == 'edges:':
                    edgeflag = 1
    
    return M
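For reference, a minimal input file in the format this parser expects could be written and read back as follows; the section layout is reconstructed from the parsing logic above and matches the writer tested in Example #14 (the file name here is just an example):

content = ('# Multiplex\n'
           '# an optional comment line\n'
           'nodes:\n'
           'a;b;c\n'
           'layers:\n'
           '0;1\n'
           'edges:\n'
           'a;b;0;0;0.5\n'
           'b;c;1;1;0.25\n')
with open('tiny_multiplex_example.txt', 'w') as f:
    f.write(content)
M = read_weighted_network('tiny_multiplex_example.txt')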
Example #5
def make_specific_timewindows_network_sklearn(imgdata,
                                              start_times,
                                              end_times,
                                              layer_labels,
                                              n_clusters=100,
                                              nanlogfile=None):
    # start_times = vector of time window start times
    # end_times = vector of time window end times
    # NB! also returns the fitted models and voxel lists in a dict keyed by (start_time, end_time):
    # (start_time, end_time) -> (model, voxellist)
    assert (len(start_times) == len(end_times))
    assert (len(start_times) == len(layer_labels))
    M = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    previous_voxels_in_clusters = dict()
    models = dict()
    layer_relabeling = dict()
    for ii in range(len(start_times)):
        layer_relabeling[ii] = layer_labels[ii]
        voxels_in_clusters = dict()
        model, voxellist = clustering.cluster_timewindow_scikit(
            imgdata[:, :, :, start_times[ii]:end_times[ii]],
            n_clusters=n_clusters)
        for jj, label in enumerate(model.labels_):
            voxels_in_clusters.setdefault(label, []).append(voxellist[jj])
        models[(start_times[ii], end_times[ii])] = (model, voxellist)
        R = calculate_cluster_correlation_matrix(
            imgdata[:, :, :, start_times[ii]:end_times[ii]],
            voxels_in_clusters)
        for kk in range(R.shape[0]):
            node1 = str(voxels_in_clusters[kk])
            for ll in range(kk + 1, R.shape[1]):
                node2 = str(voxels_in_clusters[ll])
                if not np.isnan(R[kk, ll]):
                    M[node1, ii][node2, ii] = R[kk, ll]
                else:
                    if nanlogfile is not None:
                        with open(nanlogfile, 'a+') as f:
                            f.write('NaN correlation at nodes ' + node1 +
                                    ', ' + node2 + ' at time ' +
                                    str(start_times[ii]) + ', ' +
                                    str(end_times[ii]) + '\n')
                    else:
                        print('NaN correlation at nodes ' + node1 + ', ' +
                              node2 + ' at time ' + str(start_times[ii]) +
                              ', ' + str(end_times[ii]) + '\n')
        for cluster_number in voxels_in_clusters:
            for previous_cluster_number in previous_voxels_in_clusters:
                cluster_overlap = get_overlap(
                    set(voxels_in_clusters[cluster_number]),
                    set(previous_voxels_in_clusters[previous_cluster_number]))
                M[str(previous_voxels_in_clusters[previous_cluster_number]),
                  ii - 1][str(voxels_in_clusters[cluster_number]),
                          ii] = cluster_overlap
        previous_voxels_in_clusters = voxels_in_clusters  # reference to the same object
    # relabel layers (to avoid having every net have labels 0...k)
    M = pn.transforms.relabel(M, layerNames=layer_relabeling)
    return M, models
Example #6
def threshold(net, threshold, method=">=", ignoreCouplingEdges=False):
    def accept_edge(weight, threshold, rule):
        if rule == ">=":
            return weight >= threshold
        elif rule == "<=":
            return weight <= threshold
        elif rule == ">":
            return weight > threshold
        elif rule == "<":
            return weight < threshold
        else:
            raise Exception("Invalid method for thresholding: " + str(rule))

    mplex = (type(net) == netmodule.MultiplexNetwork)
    if mplex:
        for coupling in net.couplings:
            if coupling[0] != "none":
                mplex = False

    if mplex:
        newNet = netmodule.MultiplexNetwork(
            couplings=net.couplings,
            directed=net.directed,
            noEdge=net.noEdge,
            fullyInterconnected=net.fullyInterconnected)
    else:
        newNet = netmodule.MultilayerNetwork(
            aspects=net.aspects,
            noEdge=net.noEdge,
            directed=net.directed,
            fullyInterconnected=net.fullyInterconnected)

    #copy nodes,layers,node-layers
    for node in net:
        newNet.add_node(node)
    for aspect in range(net.aspects):
        for layer in net.slices[aspect + 1]:
            newNet.add_layer(layer, aspect=aspect + 1)
    if not net.fullyInterconnected:
        for nodelayer in net.iter_node_layers():
            layer = nodelayer[1:]
            if net.aspects == 1:
                layer = layer[0]
            newNet.add_node(nodelayer[0], layer=layer)

    if mplex:
        for layer in net.iter_layers():
            for edge in net.A[layer].edges:
                if accept_edge(edge[-1], threshold, rule=method):
                    newNet.A[layer][edge[0]][edge[1]] = edge[-1]
    else:
        for edge in net.edges:
            if accept_edge(edge[-1], threshold, rule=method):
                newNet[edge[:-1]] = edge[-1]
    return newNet
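A minimal usage sketch of the thresholding above (assuming netmodule refers to pymnet's network module, as in the function body):

net = netmodule.MultilayerNetwork(aspects=1)
net[1, 2, 'x', 'x'] = 0.9
net[2, 3, 'x', 'x'] = 0.1
sparse = threshold(net, 0.5, method=">=")
# Only the edge with weight 0.9 survives; nodes and layers are all copied.
assert len(list(sparse.edges)) == 1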
Example #7
def constructMultilayer(nLayers, layers=[], nodes=[], edges=[]):
    """
    Using the pymnet module by Mikko Kivelä (https://bitbucket.org/bolozna/multilayer-networks-library/overview),
    creates a multilayer network object.
    
    Parameters:
    -----------
    nLayers: int, number of layers in the network
    layers: list of strings, names of the layers (default = [], in which case the
            layers will be automatically named with letters in alphabetical order)
    nodes: list of objects, nodes of the network. All nodes are present at all layers.
           (default = [], in which case no nodes are added; they can be added later on.)
    edges: list of tuples, edges of the network. Each edge should be a
           ((source node, source layer), (target node, target layer)) or
           ((source node, source layer), (target node, target layer), weight) tuple (in the
           former case, weight is set to 1) (default = [], in which case no edges are added;
           they can be added later on.)
           
    Returns:
    --------
    mnet: a pymnet multilayer network object
    """
    mnet = pymnet.MultilayerNetwork(aspects=1)
    if len(layers) == 0:
        letters = list(string.ascii_lowercase)
        nLetters = len(letters)
        if nLayers > nLetters:
            layers = []
            n = -1
            for i in range(0, nLayers):
                if np.remainder(i, nLetters) == 0:
                    n = n + 1
                layers.append(letters[np.remainder(i, nLetters)] + str(n))
        else:
            layers = letters[0:nLayers]
    for layer in layers:
        mnet.add_layer(layer)
    if len(nodes) > 0:
        for node in nodes:
            mnet.add_node(node)
    if len(edges) > 0:
        for edge in edges:
            sourceNode = edge[0][0]
            sourceLayer = edge[0][1]
            targetNode = edge[1][0]
            targetLayer = edge[1][1]
            if len(edge) == 3:
                weight = edge[2]
            else:
                weight = 1
            mnet[sourceNode, sourceLayer][targetNode, targetLayer] = weight
    return mnet
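A usage sketch (assumes pymnet, numpy and string are imported as the function above requires):

edges = [(('a', 'x'), ('b', 'x')),       # unweighted -> weight defaults to 1
         (('a', 'x'), ('a', 'y'), 0.5)]  # explicitly weighted
mnet = constructMultilayer(nLayers=2, layers=['x', 'y'],
                           nodes=['a', 'b'], edges=edges)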
Example #8
class test_null_models(unittest.TestCase):
    M = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    M.add_layer(0)
    M.add_layer(1)
    M.add_layer(2)
    M.add_node('a', 0)
    M.add_node('b', 0)
    M.add_node('c', 0)
    M.add_node('c', 1)
    M.add_node('d', 1)
    M.add_node('e', 1)
    M.add_node('f', 1)
    M.add_node('g', 2)
    M.add_node('h', 2)
    M['a', 'b', 0] = M['a', 'c', 0] = M['c', 'd', 1] = M['c', 'e', 1] = 1
    M['g', 'h', 2] = 1
    M['c', 'c', 0, 1] = M['c', 'd', 0, 1] = 1
    M['c', 'g', 1, 2] = M['d', 'g', 1, 2] = M['e', 'g', 1, 2] = 1

    def test_ER_from_net(self):
        M_null = null_models.ER_from_net(self.M)
        self.assertEqual(list(self.M.iter_node_layers()),
                         list(M_null.iter_node_layers()))
        self.assertEqual(list(self.M.iter_layers()),
                         list(M_null.iter_layers()))
        intra_0 = 0
        intra_1 = 0
        intra_2 = 0
        inter_01 = 0
        inter_12 = 0
        for e in M_null.edges:
            if e[2] == e[3]:
                if e[2] == 0:
                    intra_0 = intra_0 + 1
                elif e[2] == 1:
                    intra_1 = intra_1 + 1
                elif e[2] == 2:
                    intra_2 = intra_2 + 1
            else:
                if (e[2] == 0 and e[3] == 1) or (e[2] == 1 and e[3] == 0):
                    inter_01 = inter_01 + 1
                elif (e[2] == 1 and e[3] == 2) or (e[2] == 2 and e[3] == 1):
                    inter_12 = inter_12 + 1
        self.assertEqual(intra_0, 2)
        self.assertEqual(intra_1, 2)
        self.assertEqual(intra_2, 1)
        self.assertEqual(inter_01, 2)
        self.assertEqual(inter_12, 3)
Example #9
def overlay_network(net):
    """Returns the overlay network of a multilayer network with 1 aspect.

    Returns
    -------
    net : MultiplexNetwork
       A new instance of multiplex network which is produced.
    """
    assert net.aspects==1
    newnet=netmodule.MultilayerNetwork()
    for layer in net.slices[1]:
        for node1 in net.slices[0]:
            for node2 in net.slices[0]:
                if net.directed or node1>node2:
                    newnet[node1,node2]=newnet[node1,node2]+net[node1,node2,layer,layer]
    return newnet
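The overlay sums each edge's weights over all layers into a single monoplex network. A sketch, assuming an undirected one-aspect network and netmodule as above:

net = netmodule.MultilayerNetwork(aspects=1)
net[1, 2, 'x', 'x'] = 1
net[1, 2, 'y', 'y'] = 2
overlay = overlay_network(net)
assert overlay[1, 2] == 3  # 1 from layer 'x' plus 2 from layer 'y'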
Example #10
def compute_uniqueness(net):
	""" This function computes the percentage of unique structures in a network (with one layer).
	It extracts the neighborhood of every node and maps it to an isomorphism class,
	represented by a complete graph invariant (equivalent to a canonical labelling).
	It then stores the number of neighborhoods for each isomorphism class
	(in a dictionary with the complete invariant as a key), and returns the fraction of neighborhoods whose class occurs exactly once.

	Parameters
	----------
	net : Multilayer network (also single-layer networks are acceptable)
		the network 

	Returns
	--------	
	float 
		the percentage of unique neighborhoods in the graph (e.g. 1.00 if all neighborhoods have unique structures, 0.00 if there are no unique structures)
	"""
	dic_layer_1_neigh = {} #dictionary to store the list of neighbors for every node
	for n in list(net): #list of nodes
		dic_layer_1_neigh[n] = []

	#store the list of neighbors of every node
	for e in list(net.edges):
		if e[0] != e[1]:
			dic_layer_1_neigh[e[0]].append(e[1])
			dic_layer_1_neigh[e[1]].append(e[0])

	dic_count_n = {} #dictionary to store the number of occurrences of each isomorphism class
	for k in dic_layer_1_neigh.keys(): #go through all nodes
		neigh_net = pymnet.MultilayerNetwork(aspects=0) #create a temporary network to store the neighborhood of a node
		for neigh in dic_layer_1_neigh[k]: #go through the neighbors
			for sec_neigh in dic_layer_1_neigh[neigh]: #go through the neighbors of each neighbor
				if sec_neigh in dic_layer_1_neigh[k]: #if both endpoints are neighbors of k, this edge lies inside the neighborhood
					neigh_net[neigh, sec_neigh] = 1 #add the edge between the two neighbors

		compl_inv_n = str(pymnet.get_complete_invariant(neigh_net)) #compute the complete invariant
		#increment the count of isomorphism classes
		try:
			dic_count_n[compl_inv_n] += 1
		except KeyError:
			dic_count_n[compl_inv_n] = 1

	# assumed completion (the excerpt ends without a return): the fraction of
	# neighborhoods whose isomorphism class occurs exactly once, per the docstring
	return sum(1 for count in dic_count_n.values() if count == 1) / len(dic_layer_1_neigh)
Example #11
 def test_threshold_multilayer_network(self):
     testnet = pn.full_multilayer(10, [1, 2, 3])
     for edge in list(testnet.edges):
         if abs(edge[2] - edge[3]) > 1:
             testnet[edge[0], edge[1], edge[2], edge[3]] = 0
         else:
             testnet[edge[0], edge[1], edge[2], edge[3]] = -0.8
     testnet[2, 3, 1, 1] = 4
     testnet[4, 5, 1, 1] = 0.19
     testnet[3, 4, 1, 1] = 0.14
     testnet[5, 6, 2, 2] = 0.9
     testnet[7, 8, 2, 2] = 0.18
     testnet[6, 7, 2, 2] = -0.02
     testnet[4, 5, 3, 3] = 0.95
     testnet[2, 3, 3, 3] = 0.17
     testnet[3, 4, 3, 3] = 0.02
     testnet[3, 4, 1, 2] = 1.2
     testnet[6, 7, 1, 2] = 0.15
     testnet[9, 8, 1, 2] = 0.02
     testnet[5, 4, 2, 3] = 0.78
     testnet[8, 7, 2, 3] = 0.15
     testnet[1, 8, 2, 3] = -0.02
     thresholded_net = network_construction.threshold_multilayer_network(
         testnet, 0.05, 0.02)
     truenet = pn.MultilayerNetwork(aspects=1, fullyInterconnected=True)
     for ii in [1, 2, 3]:
         truenet.add_layer(ii)
     for ii in range(10):
         truenet.add_node(ii)
     truenet[2, 3, 1, 1] = 1
     truenet[4, 5, 1, 1] = 1
     truenet[5, 6, 2, 2] = 1
     truenet[7, 8, 2, 2] = 1
     truenet[4, 5, 3, 3] = 1
     truenet[2, 3, 3, 3] = 1
     truenet[3, 4, 1, 2] = 1
     truenet[6, 7, 1, 2] = 1
     truenet[5, 4, 2, 3] = 1
     truenet[8, 7, 2, 3] = 1
     self.assertEqual(thresholded_net, truenet)
Example #12
def get_underlying_graph(net):
    """Creates the underlying graph of a multiplex network.

    Parameters
    ----------
    net : MultilayerNetwork, or MultiplexNetwork 
       The original network.

    Return
    ------
    MultilayerNetwork object with zero aspects.
    Node-layer tuples are converted to node names that are strings.

    Notes
    -----
    The node names are converted into strings instead of tuples because
    Python doesn't differentiate between lists of arguments and tuples
    in __getitem__ function calls.

    A useful way of extracting the tuples back from the string is to
    use the eval method.
    """

    #The network object to be returned
    newNet = netmodule.MultilayerNetwork(aspects=0,
                                         noEdge=net.noEdge,
                                         directed=net.directed)

    #Add nodes
    for nl in net.iter_node_layers():
        newNet.add_node(str(nl))

    #Add edges
    for edge in net.edges:
        n1, n2 = net._link_to_nodes(edge[:-1])
        w = edge[-1]
        newNet[str(n1)][str(n2)] = w

    return newNet
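As the notes above suggest, the stringified node-layer tuples can be mapped back with eval. A hedged sketch, assuming a one-aspect pymnet network net is in scope:

graph = get_underlying_graph(net)
for node in graph:
    node_layer = eval(node)  # e.g. "('a', 0)" -> ('a', 0)
    original_node, layer = node_layer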
Example #13
def subnet(net,nodes,*layers,**kwargs):
    """Returns an induced subgraph with given set of nodes and layers.

    Parameters
    ----------
    net : MultilayerNetwork, MultiplexNetwork 
        The original network.
    nodes : sequence
        The nodes that span the induced subgraph.
    *layers : *sequence
        (Elementary) layers included in the subgraph. One parameter for each aspect.
    newNet : None, MultilayerNetwork, MultiplexNetwork    
        An empty new network or None. If None, the new network is created as
        an empty copy of net. The edges and nodes are copied to this network.
    nolinks : bool
        If set True, this function does not copy any links. That is, the returned
        network is _not_ an induced subnetwork but an empty network.

    Return
    ------
    subnet : type(net), or type(newNet)
        The induced subgraph that contains only nodes given in
        `nodes` and the edges between those nodes that are
        present in `net`. Node properties etc are left untouched.
    """

    if "newNet" in kwargs:
        newNet=kwargs["newNet"]
    else:
        newNet=None

    if "nolinks" in kwargs:
        nolinks=kwargs["nolinks"]
    else:
        nolinks=False

    assert len(layers)==net.aspects, "Please give layers for each aspect."
    nodelayers=[]
    for a,elayers in enumerate(itertools.chain([nodes],layers)):
        if elayers is None:
            nodelayers.append(set(net.get_layers(a)))
        else:
            nodelayers.append(set(elayers))

    if newNet is None:
        if isinstance(net,netmodule.MultiplexNetwork):
            newNet=netmodule.MultiplexNetwork(couplings=net.couplings,
                                    directed=net.directed,
                                    noEdge=net.noEdge,
                                    fullyInterconnected=net.fullyInterconnected)
        elif isinstance(net,netmodule.MultilayerNetwork):
            newNet=netmodule.MultilayerNetwork(aspects=net.aspects,
                                     noEdge=net.noEdge,
                                     directed=net.directed,
                                     fullyInterconnected=net.fullyInterconnected)
        else:
            raise Exception("Invalid net type: "+str(type(net)))

    addedElementaryLayers=[]
    for a,elayers in enumerate(nodelayers):#enumerate(itertools.chain((nodes,),layers)):
        if net.fullyInterconnected or a!=0:
            addedElementaryLayers.append(0)
            oldElementaryLayers=net.get_layers(a)
            for elayer in elayers:
                if elayer in oldElementaryLayers:
                    newNet.add_layer(elayer,a)
                    addedElementaryLayers[-1]+=1

    if not net.fullyInterconnected:
        totalNodeLayers=0
        for nl in net.iter_node_layers():
            if reduce(lambda x,y:x and y, (e in nodelayers[a] for a,e in enumerate(nl))):
                if net.aspects==1:
                    newNet.add_node(nl[0],layer=nl[1])
                else:
                    newNet.add_node(nl[0],layer=nl[1:])
                totalNodeLayers+=1
    else:
        totalNodeLayers=reduce(lambda x,y:x*y,addedElementaryLayers)


    #copy the links
    if not nolinks:
        if isinstance(net,netmodule.MultiplexNetwork):
            #Go through all the combinations of new layers
            for layer in itertools.product(*nodelayers[1:]):
                layer=layer[0] if net.aspects==1 else layer
                subnet(net.A[layer],nodelayers[0],newNet=newNet.A[layer],nolinks=nolinks)
        elif isinstance(net,netmodule.MultilayerNetwork):
            for nl1 in itertools.product(*nodelayers):
                nl1 = nl1[0] if net.aspects==0 else nl1
                if net[nl1].deg()>=totalNodeLayers:
                    for nl2 in itertools.product(*nodelayers):
                        nl2 = nl2[0] if net.aspects==0 else nl2
                        if net[nl1][nl2]!=net.noEdge:
                            newNet[nl1][nl2]=net[nl1][nl2]
                else:
                    if net.aspects==0:
                        for nl2 in net[nl1]:
                            if nl2 in nodelayers[0]:
                                newNet[nl1][nl2]=net[nl1][nl2]
                    else:
                        for nl2 in net[nl1]:
                            if reduce(lambda x,y:x and y, (e in nodelayers[a] for a,e in enumerate(nl2))):
                                newNet[nl1][nl2]=net[nl1][nl2]
        else:
            raise Exception("Invalid net type: "+str(type(net)))

    return newNet
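A usage sketch for subnet (assuming a one-aspect pymnet network net; passing None for a layer aspect selects all of its layers):

# Induced subgraph spanned by nodes 1 and 2 on layer 'x' only.
sub = subnet(net, [1, 2], ['x'])
# Same node set, but keep every layer.
sub_all_layers = subnet(net, [1, 2], None)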
Example #14
    def test_write_weighted_network(self):
        M_multiplex = pn.MultiplexNetwork(couplings='ordinal',
                                          fullyInterconnected=True)
        M_multiplex['(1, 2, 3)', 0]['(2, 3, 4)', 0] = 0.5
        M_multiplex['(2, 3, 4)', 1]['(3, 4, 5)', 1] = 0.999
        M_multiplex['(3, 4, 5)', 2]['(1, 2, 3)', 2] = 1
        possible_nodelines = set([
            '(1, 2, 3);(2, 3, 4);(3, 4, 5)\n',
            '(1, 2, 3);(3, 4, 5);(2, 3, 4)\n',
            '(2, 3, 4);(1, 2, 3);(3, 4, 5)\n',
            '(2, 3, 4);(3, 4, 5);(1, 2, 3)\n',
            '(3, 4, 5);(1, 2, 3);(2, 3, 4)\n',
            '(3, 4, 5);(2, 3, 4);(1, 2, 3)\n'
        ])
        possible_layerlines = set(
            ['0;1;2\n', '0;2;1\n', '1;0;2\n', '1;2;0\n', '2;0;1\n', '2;1;0\n'])
        edgeset = set([
            '(1, 2, 3);(2, 3, 4);0;0;0.5\n', '(1, 2, 3);(1, 2, 3);0;1;1.0\n',
            '(1, 2, 3);(1, 2, 3);1;2;1.0\n', '(1, 2, 3);(3, 4, 5);2;2;1\n',
            '(2, 3, 4);(2, 3, 4);0;1;1.0\n', '(2, 3, 4);(3, 4, 5);1;1;0.999\n',
            '(2, 3, 4);(2, 3, 4);1;2;1.0\n', '(3, 4, 5);(3, 4, 5);0;1;1.0\n',
            '(3, 4, 5);(3, 4, 5);1;2;1.0\n'
        ])
        network_io.write_weighted_network(
            M_multiplex, 'test_for_network_writing_WILL_BE_REMOVED.txt',
            'Created by test_write_weighted_network')
        try:
            with open('test_for_network_writing_WILL_BE_REMOVED.txt',
                      'r') as f:
                self.assertEqual(f.readline(), '# Multiplex\n')
                self.assertEqual(f.readline(),
                                 '# Created by test_write_weighted_network\n')
                self.assertEqual(f.readline(), 'nodes:\n')
                self.assertTrue(f.readline() in possible_nodelines)
                self.assertEqual(f.readline(), 'layers:\n')
                self.assertTrue(f.readline() in possible_layerlines)
                self.assertEqual(f.readline(), 'edges:\n')
                for line in f:
                    self.assertTrue(line in edgeset)
                    edgeset.remove(line)
                self.assertEqual(len(edgeset), 0)
                self.assertEqual(f.readline(), '')
        finally:
            os.remove('test_for_network_writing_WILL_BE_REMOVED.txt')

        M_multilayer = pn.MultilayerNetwork(aspects=1,
                                            fullyInterconnected=False)
        M_multilayer['[(1, 2, 3),(2, 3, 4)]', 0]['[(3, 4, 5)]', 0] = 0.123
        M_multilayer['[(1, 2, 3)]', 1]['[(2, 3, 4),(3, 4, 5)]', 1] = 0.456
        M_multilayer['[(1, 2, 3),(2, 3, 4)]', 0]['[(1, 2, 3)]', 1] = 0.5
        M_multilayer['[(1, 2, 3),(2, 3, 4)]', 0]['[(2, 3, 4),(3, 4, 5)]',
                                                 1] = 0.333
        M_multilayer['[(3, 4, 5)]', 0]['[(1, 2, 3)]', 1] = 0
        M_multilayer['[(3, 4, 5)]', 0]['[(2, 3, 4),(3, 4, 5)]', 1] = 0.5
        possible_nodelines = set([
            x[0] + ';' + x[1] + ';' + x[2] + ';' + x[3] + '\n'
            for x in itertools.permutations([
                '[(1, 2, 3)]', '[(1, 2, 3),(2, 3, 4)]',
                '[(2, 3, 4),(3, 4, 5)]', '[(3, 4, 5)]'
            ])
        ])
        possible_layerlines = set(['0;1\n', '1;0\n'])
        edgeset = set([
            '[(1, 2, 3),(2, 3, 4)];[(2, 3, 4),(3, 4, 5)];0;1;0.333\n',
            '[(1, 2, 3),(2, 3, 4)];[(3, 4, 5)];0;0;0.123\n',
            '[(1, 2, 3),(2, 3, 4)];[(1, 2, 3)];0;1;0.5\n',
            '[(3, 4, 5)];[(2, 3, 4),(3, 4, 5)];0;1;0.5\n',
            '[(1, 2, 3)];[(2, 3, 4),(3, 4, 5)];1;1;0.456\n'
        ])
        network_io.write_weighted_network(
            M_multilayer, 'test_for_network_writing_WILL_BE_REMOVED.txt',
            'Created by test_write_weighted_network')
        try:
            with open('test_for_network_writing_WILL_BE_REMOVED.txt',
                      'r') as f:
                self.assertEqual(f.readline(), '# Multilayer\n')
                self.assertEqual(f.readline(),
                                 '# Created by test_write_weighted_network\n')
                self.assertEqual(f.readline(), 'nodes:\n')
                self.assertTrue(f.readline() in possible_nodelines)
                self.assertEqual(f.readline(), 'layers:\n')
                self.assertTrue(f.readline() in possible_layerlines)
                self.assertEqual(f.readline(), 'edges:\n')
                for line in f:
                    self.assertTrue(line in edgeset)
                    edgeset.remove(line)
                self.assertEqual(len(edgeset), 0)
                self.assertEqual(f.readline(), '')
        finally:
            os.remove('test_for_network_writing_WILL_BE_REMOVED.txt')
Example #15
class test_subgraph_classification(unittest.TestCase):

    testnet = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    testnet[1, 2, 0, 0] = 1
    testnet[1, 1, 0, 1] = 1
    testnet[3, 3, 1, 2] = 1
    testnet[3, 4, 2, 2] = 1
    testnet[1, 5, 0, 0] = 1
    testnet[6, 7, 2, 2] = 1
    testnet[6, 6, 2, 3] = 1
    testnet[7, 7, 2, 3] = 1
    testnet[8, 9, 2, 2] = 1
    testnet[9, 9, 2, 3] = 1

    subnet1 = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    subnet1['a', 'b', 0, 0] = 1
    subnet1['a', 'a', 0, 1] = 1

    subnet2 = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    subnet2['a', 'b', 0, 0] = 1
    subnet2['a', 'a', 0, 1] = 1
    subnet2['b', 'b', 0, 1] = 1

    subnet3 = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    subnet3['a', 'b', 1, 1] = 1
    subnet3['a', 'a', 0, 1] = 1

    # nodelayer isomorphisms
    compinv1 = pn.get_complete_invariant(subnet1)
    compinv2 = pn.get_complete_invariant(subnet2)

    # node isomorphisms
    node_compinv1 = pn.get_complete_invariant(subnet1, [0])
    node_compinv2 = pn.get_complete_invariant(subnet2, [0])
    node_compinv3 = pn.get_complete_invariant(subnet3, [0])

    def test_find_isomorphism_classes_with_aggregated_dict(self):
        dd = collections.defaultdict(dict)
        subgraph_classification.find_isomorphism_classes(
            self.testnet, 2, 2, 'this_file_should_not_exist', 'all', dd, None)
        self.assertEqual(len(dd), 2)
        self.assertEqual(dd[self.compinv1], {(0, 1): 2, (1, 2): 1, (2, 3): 1})
        self.assertEqual(dd[self.compinv2], {(2, 3): 1})

    def test_find_isomorphism_classes_agg_dict_node_isomorphism_example_dicts(
            self):
        # test a) aggregated dict, b) node isomorphism, c) example generation
        dd = collections.defaultdict(dict)
        dd_e = dict()
        subgraph_classification.find_isomorphism_classes(
            self.testnet, 2, 2, 'this_file_should_not_exist', [0], dd, dd_e)
        self.assertEqual(len(dd), 3)
        self.assertEqual(dd[self.node_compinv1], {(0, 1): 2, (2, 3): 1})
        self.assertEqual(dd[self.node_compinv2], {(2, 3): 1})
        self.assertEqual(dd[self.node_compinv3], {(1, 2): 1})
        self.assertEqual(len(dd_e), 3)
        self.assertEqual(
            pn.get_complete_invariant(dd_e[self.node_compinv1], [0]),
            self.node_compinv1)
        self.assertEqual(
            pn.get_complete_invariant(dd_e[self.node_compinv2], [0]),
            self.node_compinv2)
        self.assertEqual(
            pn.get_complete_invariant(dd_e[self.node_compinv3], [0]),
            self.node_compinv3)
        self.assertEqual(pn.get_complete_invariant(dd_e[self.node_compinv1]),
                         self.compinv1)
        self.assertEqual(pn.get_complete_invariant(dd_e[self.node_compinv2]),
                         self.compinv2)
        self.assertEqual(pn.get_complete_invariant(dd_e[self.node_compinv3]),
                         self.compinv1)
Example #16
def plot_complete_invariant_time_series(subnet_filename,
                                        title,
                                        xlabel,
                                        ylabel,
                                        savename=None,
                                        examples_filename=None,
                                        map_ni_to_nli=False,
                                        colormap=plt.cm.jet,
                                        **kwargs):

    M = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    M['a', 'b', 0, 0] = 1
    M['a', 'a', 0, 1] = 1

    #layersetwise_complete_invariants = dict()
    complete_invariant_timeseries_dict = dict()
    layersets = set()
    invariants = set()

    if subnet_filename.strip()[-10:] == 'agg.pickle':
        with open(subnet_filename, 'rb') as f:  # pickle files are binary
            complete_invariant_timeseries_dict = pickle.load(f)
        for key in complete_invariant_timeseries_dict:
            invariants.add(key)
            layersets.update(
                [key2 for key2 in complete_invariant_timeseries_dict[key]])

    else:
        for subnet_data in subgraph_classification.yield_subnet_with_complete_invariant(
                subnet_filename):
            nodes = tuple(subnet_data[0])
            layers = tuple(sorted(subnet_data[1]))
            complete_invariant = subnet_data[2]
            layersets.add(layers)
            invariants.add(complete_invariant)
            #layersetwise_complete_invariants[layers] = layersetwise_complete_invariants.get(layers,dict())
            #layersetwise_complete_invariants[layers][complete_invariant] = layersetwise_complete_invariants[layers].get(complete_invariant,0) + 1
            complete_invariant_timeseries_dict[
                complete_invariant] = complete_invariant_timeseries_dict.get(
                    complete_invariant, dict())
            complete_invariant_timeseries_dict[complete_invariant][
                layers] = complete_invariant_timeseries_dict[
                    complete_invariant].get(layers, 0) + 1

    layersets = list(layersets)
    layersets.sort()
    full_range = list(
        zip(*(range(layersets[0][0], layersets[-1][-1] + 1)[ii:]
              for ii in range(len(layersets[0])))))
    if layersets != full_range:
        layersets = full_range
    x_axis = list(range(layersets[0][0], layersets[-1][1]))

    # load example networks
    if examples_filename is not None:
        with open(examples_filename, 'rb') as f:  # pickle files are binary
            invdicts = pickle.load(f)
    else:
        nnodes = int(subnet_filename.split('/')[-1].strip()[0])
        nlayers = int(subnet_filename.split('/')[-1].strip()[2])
        example_nets_filename = 'example_nets/' + str(nnodes) + '_' + str(
            nlayers) + '.pickle'
        invdicts = complete_invariant_dicts.load_example_nets_file(
            example_nets_filename)

    # map node isomorphisms to nodelayer isomorphisms:
    if map_ni_to_nli:
        invariants = set()
        mapped_invdicts = dict()
        mapped_complete_invariant_timeseries_dict = dict()
        for complete_invariant in complete_invariant_timeseries_dict:
            nl_complete_invariant = pn.get_complete_invariant(
                invdicts[complete_invariant])
            if nl_complete_invariant not in mapped_invdicts:
                mapped_invdicts[nl_complete_invariant] = invdicts[
                    complete_invariant]
            invariants.add(nl_complete_invariant)
            mapped_complete_invariant_timeseries_dict[
                nl_complete_invariant] = mapped_complete_invariant_timeseries_dict.get(
                    nl_complete_invariant, dict())
            for tw in complete_invariant_timeseries_dict[complete_invariant]:
                mapped_complete_invariant_timeseries_dict[
                    nl_complete_invariant][
                        tw] = mapped_complete_invariant_timeseries_dict[
                            nl_complete_invariant].get(
                                tw, 0) + complete_invariant_timeseries_dict[
                                    complete_invariant][tw]
        complete_invariant_timeseries_dict = mapped_complete_invariant_timeseries_dict
        invdicts = mapped_invdicts

    # number of example subfigures needed
    number_of_invariants = len(invariants)
    grid_side_length = 0
    while grid_side_length**2 < number_of_invariants:
        grid_side_length = grid_side_length + 1
    ax_locs = list(
        itertools.product(range(grid_side_length),
                          range(grid_side_length, 2 * grid_side_length)))

    # figure
    fig = plt.figure(figsize=(12, 6))
    main_ax = plt.subplot2grid((grid_side_length, 2 * grid_side_length),
                               (0, 0),
                               rowspan=grid_side_length,
                               colspan=grid_side_length)
    colorcycler = plt.cycler('color',
                             colormap(np.linspace(0, 1, number_of_invariants)))
    main_ax.set_prop_cycle(colorcycler)
    #    side_ax = plt.subplot2grid((2,4),(0,2),projection='3d')
    #    side_ax2 = plt.subplot2grid((2,4),(0,3),projection='3d')
    #plt.hold(True)

    # identifiers for legend
    ids = range(len(invariants))

    for ii, compinv in enumerate(
            sorted(complete_invariant_timeseries_dict)):
        y_values = []
        for layerset in layersets:
            y_values.append(complete_invariant_timeseries_dict[compinv].get(
                layerset, 0))
        #main_ax = plt.subplot2grid((2,4),(0,0),rowspan=2,colspan=2)
        #main_ax.plot(x_axis,y_values,label=str(compinv))
        line = main_ax.plot(x_axis, y_values, label=str(ii))
        compinv_ax = plt.subplot2grid((grid_side_length, 2 * grid_side_length),
                                      ax_locs[ii],
                                      projection='3d')
        #example_ax = plt.gcf().add_axes((1.1,0,0.5,0.5),projection='3d')
        M = invdicts[compinv]
        pn.draw(M,
                layout='shell',
                alignedNodes=True,
                ax=compinv_ax,
                layerLabelRule={},
                nodeLabelRule={})
        #compinv_ax.text2D(0,0,str(ii),None,False,transform=compinv_ax.transAxes)
        if grid_side_length < 3:
            legend_loc = 'lower left'
        else:
            legend_loc = (0, 0)
        leg = compinv_ax.legend(labels=[''],
                                loc=legend_loc,
                                handles=line,
                                frameon=False)
        plt.setp(leg.get_lines(), linewidth=4)


#        if ii==0:
#            pn.draw(M,ax=side_ax)
#        else:
#            pn.draw(M,ax=side_ax2)
#plt.sca(fig.gca())

#    plt.xticks(x_axis,layersets,rotation=90,fontsize='small')
#    plt.xlabel(xlabel)
#    plt.ylabel(ylabel)
#    plt.title(title)
#    plt.legend(bbox_to_anchor=(1.05,1.),loc=0,fontsize='xx-small')
#    plt.yscale('log')
#    plt.margins(y=0.2)
#    plt.xlim([x_axis[0],x_axis[-1]])
#    plt.tight_layout()
#    plt.show()
    main_ax.set_xticks(x_axis)
    main_ax.set_xticklabels(layersets, rotation=90, fontsize='small')
    main_ax.set_yscale('log')
    main_ax.set_xlabel(xlabel)
    main_ax.set_ylabel(ylabel)
    main_ax.set_title(title)
    #main_ax.legend(bbox_to_anchor=(1.05,0),loc='upper left',ncol=number_of_invariants,fontsize='small')
    #main_ax.yscale('log')
    main_ax.margins(y=0.2)
    main_ax.set_xlim([x_axis[0], x_axis[-1]])
    plt.tight_layout()

    if savename is None:
        plt.show()
    else:
        plt.savefig(savename, format='pdf')
Example #17
def make_clustered_multilayer(imgdata,timewindow=100,overlap=0,n_clusters=100,method='sklearn',template=None,nanlogfile=None):
    '''
    Possible methods:
    'sklearn' : hierarchical clustering from sklearn, computed separately for each time window
    'template' : preconstructed clustering, the same for every time window (requires parameter template : 3d ndarray with the same shape
        as imgdata.shape[0:3], where 0 denotes a masked voxel and other values denote cluster identity;
        with this method, n_clusters is ignored)
    '''
    k = get_number_of_layers(imgdata.shape,timewindow,overlap)
    start_times,end_times = get_start_and_end_times(k,timewindow,overlap)
    
    if method == 'sklearn':
        M = pn.MultilayerNetwork(aspects=1,fullyInterconnected=False)
        previous_voxels_in_clusters = dict()
        for tw_no in range(k):
            # Create new object and make voxels_in_clusters refer to it (doesn't change previous_voxels_in_clusters)
            voxels_in_clusters = dict()
            #start = tw_no*(timewindow-overlap)
            #end = timewindow + tw_no*(timewindow-overlap)
            model,voxellist = clustering.cluster_timewindow_scikit(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],n_clusters=n_clusters)
            for ii,label in enumerate(model.labels_):
                voxels_in_clusters.setdefault(label,[]).append(voxellist[ii])
            R = calculate_cluster_correlation_matrix(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],voxels_in_clusters)
            for ii in range(R.shape[0]):
                node1 = str(voxels_in_clusters[ii])
                for jj in range(ii+1,R.shape[1]):
                    node2 = str(voxels_in_clusters[jj])
                    if not np.isnan(R[ii,jj]):
                        M[node1,tw_no][node2,tw_no] = R[ii,jj]
                    else:
                        if nanlogfile is not None:
                            with open(nanlogfile,'a+') as f:
                                f.write('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
                        else:
                            print('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
            for cluster_number in voxels_in_clusters:
                for previous_cluster_number in previous_voxels_in_clusters:
                    cluster_overlap = get_overlap(set(voxels_in_clusters[cluster_number]),set(previous_voxels_in_clusters[previous_cluster_number]))
                    M[str(previous_voxels_in_clusters[previous_cluster_number]),tw_no-1][str(voxels_in_clusters[cluster_number]),tw_no] = cluster_overlap
            previous_voxels_in_clusters = voxels_in_clusters # reference to the same object
            
    elif method == 'template':
        M = pn.MultiplexNetwork(couplings='ordinal',fullyInterconnected=True)
        voxels_in_clusters = get_voxels_in_clusters(template)
        for tw_no in range(k):
            R = calculate_cluster_correlation_matrix(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],voxels_in_clusters)
            for ii in range(R.shape[0]):
                node1 = str(voxels_in_clusters[ii])
                for jj in range(ii+1,R.shape[1]):
                    node2 = str(voxels_in_clusters[jj])
                    if not np.isnan(R[ii,jj]):
                        M[node1,tw_no][node2,tw_no] = R[ii,jj]
                    else:
                        if nanlogfile is not None:
                            with open(nanlogfile,'a+') as f:
                                f.write('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
                        else:
                            print('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
    
    else:
        raise NotImplementedError('This clustering not implemented')
    
    return M
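A hedged usage sketch with synthetic data; this assumes the surrounding module's helpers (clustering.cluster_timewindow_scikit, calculate_cluster_correlation_matrix, get_overlap, get_number_of_layers, get_start_and_end_times) are importable, so it illustrates the call signature rather than being a self-contained script:

import numpy as np

# Synthetic 4D data: 5 x 5 x 5 voxels over 200 time points (hypothetical sizes).
imgdata = np.random.rand(5, 5, 5, 200)
M = make_clustered_multilayer(imgdata, timewindow=100, overlap=0,
                              n_clusters=10, method='sklearn')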
Example #18
def yield_clustered_multilayer_network_in_layersets(imgdata,layerset_size,timewindow=100,overlap=0,n_clusters=100,
                                                    method='sklearn',template=None,nanlogfile=None,
                                                    event_time_stamps=None,ROI_centroids=[],ROI_names=[],
                                                    consistency_threshold=-1,consistency_target_function='spatialConsistency',
                                                    f_transform_consistency=False,calculate_consistency_while_clustering=False,
                                                    n_consistency_CPUs=5,consistency_save_path='spatial-consistency.pkl',
                                                    n_consistency_iters=100,consistency_percentage_ROIs_for_thresholding=0,
                                                    n_ReHo_neighbors=6,percentage_min_centroid_distance=0,ReHo_measure='ReHo'):
    
    """
    Consistency-related inputs:
    ---------------------------
    ROI_centroids: n_ROIs x 3 np.array, coordinates (in voxels) of the ROI centroids, around which ROIs are grown in some clustering
                   methods (default=[]). Set to 'random' to use random seeds and to 'ReHo' to use seeds selected by maximal Regional 
                   Homogeneity.
    ROI_names: list of strs, names of ROIs (default=[])
    consistency_threshold: float or string. The lowest centroid-voxel correlation that leads to adding a voxel. All
                           thresholding approaches may lead to parcellation where some voxels don't belong to any ROI.
                           Options:
                           - -1: no thresholding (the default option)
                           - float: no voxel is added to a ROI if the value of consistency target function < threshold
                           - 'data-driven': no voxel is added to a ROI if after adding the voxel the mean correlation of the 
                           voxels of this ROI is lower than the mean correlation between the voxels of this ROI and the voxels
                           of any other ROI. However, the same voxel can be considered again later.
                           - 'strict data-driven': similar as above but when a voxel has been found to be sub-threshold
                           for a ROI, it is entirely removed from the base of possible voxels for this ROI
                           - 'voxel-wise': no voxel is added to a ROI if its average correlation to the voxels of this
                           ROI is lower than its average correlation to the voxels of at least
                           consistency_percentage_ROIs_for_thresholding * n_ROIs other ROIs
                           - 'voxel-neighbor': no voxel is added to a ROI if its average correlation to the voxels of this
                           ROI is lower than the average correlation of a voxel to its closest (6-voxel) neighborhood. This
                           threshold value is calculated as an average across all voxels before starting to build the ROIs.
    consistency_target_function: str, homogeneity  measure that will be optimized. Options: 
                                 'spatialConsistency': the mean Pearson correlation coefficient of the voxels already in the ROI and 
                                                       the candidate voxel (the default option)
                                 'correlationWithCentroid': the Pearson correlation between the ROI centroid time series and 
                                                            voxel time series
    f_transform_consistency: boolean; if True, Fisher z-transform is applied to the correlations before averaging (default: False)
    calculate_consistency_while_clustering: boolean; if True, the consistency of each cluster is calculated and saved in a pickle
                                            file during the clustering. Note that setting this to True doesn't change clustering
                                            method. So, this can be applied together with 'template' or 'sklearn' methods. (default: False)
    n_consistency_CPUs: int, number of CPUs used for parallel consistency calculations if calculate_consistency_while_clustering == True.
                        (default: 5)
    consistency_save_path: str, a path to which the consistency pickle will be saved (default: 'spatial-consistency.pkl')
    n_consistency_iters: int, number of random seed sets to generate if ROI_centroids == 'random' (default = 100)
    consistency_percentage_ROIs_for_thresholding: float (from 0 to 1), used in thresholding (see above) (default = 0, which is interpreted
                                                  as 1/n_ROIs)
    percentage_min_centroid_distance: float (from 0 to 1), the minimal distance between ReHo-based seeds is set as
                                   percentage_min_centroid_distance times the maximal dimension of imgdata (default = 0).
    n_ReHo_neighbors: int, number of neighbors used for calculating ReHo if ReHo-based seeds are to be used; options: 6 (faces),
                    18 (faces + edges), 26 (faces + edges + corners) (default = 6)
    ReHo_measure: str, the measure of neighbourhood similarity used to pick the ReHo-based seeds; options: 'ReHo', 'spatialConsistency' (default = 'ReHo')
    """
    
    
    # If event_time_stamps is specified, then they are used to compute start_times, end_times and k (and timewindow and overlap are ignored).
    # Otherwise, timewindow and overlap are used to compute start_times, end_times and k.
    if event_time_stamps is None:
        k = get_number_of_layers(imgdata.shape,timewindow,overlap)
        start_times,end_times = get_start_and_end_times(k,timewindow,overlap)
    else:
        assert isinstance(event_time_stamps,list)
        k = len(event_time_stamps) + 1
        start_times = [0] + event_time_stamps
        end_times = event_time_stamps + [imgdata.shape[3]]
    layersets = list(zip(*(range(k)[ii:] for ii in range(layerset_size))))
    
    if method == 'sklearn':
        voxels_in_clusters_by_timewindow = dict()
        for layerset in layersets:
            M = pn.MultilayerNetwork(aspects=1,fullyInterconnected=False)
            previous_voxels_in_clusters = dict()
            for tw_no in layerset:
                if not tw_no in voxels_in_clusters_by_timewindow:
                    voxels_in_clusters = dict()
                    model,voxellist = clustering.cluster_timewindow_scikit(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],n_clusters=n_clusters)
                    for ii,label in enumerate(model.labels_):
                        voxels_in_clusters.setdefault(label,[]).append(voxellist[ii])
                    voxels_in_clusters_by_timewindow[tw_no] = voxels_in_clusters
                else:
                    voxels_in_clusters = voxels_in_clusters_by_timewindow[tw_no]
                R = calculate_cluster_correlation_matrix(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],voxels_in_clusters)
                for ii in range(R.shape[0]):
                    node1 = str(voxels_in_clusters[ii])
                    for jj in range(ii+1,R.shape[1]):
                        node2 = str(voxels_in_clusters[jj])
                        if not np.isnan(R[ii,jj]):
                            M[node1,tw_no][node2,tw_no] = R[ii,jj]
                        else:
                            if nanlogfile is not None:
                                with open(nanlogfile,'a+') as f:
                                    f.write('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
                            else:
                                print('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
                for cluster_number in voxels_in_clusters:
                    for previous_cluster_number in previous_voxels_in_clusters:
                        cluster_overlap = get_overlap(set(voxels_in_clusters[cluster_number]),set(previous_voxels_in_clusters[previous_cluster_number]))
                        M[str(previous_voxels_in_clusters[previous_cluster_number]),tw_no-1][str(voxels_in_clusters[cluster_number]),tw_no] = cluster_overlap
                previous_voxels_in_clusters = voxels_in_clusters # reference to the same object
                if calculate_consistency_while_clustering: # calculating spatial consistency of formed clusters
                    windowdata = imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]]
                    if '.' in consistency_save_path:
                        name,extension = consistency_save_path.rsplit('.',1)
                        consistency_save_path_final = name + '_' + str(tw_no) + '.' + extension
                    else:
                        # no extension: append the window number to the path itself
                        consistency_save_path_final = consistency_save_path + '_' + str(tw_no) + '.pkl'
                    calculate_spatial_consistency(windowdata,voxels_in_clusters,f_transform_consistency,n_consistency_CPUs,consistency_save_path_final)
            del(voxels_in_clusters_by_timewindow[min(voxels_in_clusters_by_timewindow)])
            yield M
            del(M)
    
    elif method == 'template':
        voxels_in_clusters = get_voxels_in_clusters(template)
        for layerset in layersets:
            M = pn.MultiplexNetwork(couplings='ordinal',fullyInterconnected=True)
            for tw_no in layerset:
                R = calculate_cluster_correlation_matrix(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],voxels_in_clusters)
                for ii in range(R.shape[0]):
                    node1 = str(voxels_in_clusters[ii])
                    for jj in range(ii+1,R.shape[1]):
                        node2 = str(voxels_in_clusters[jj])
                        if not np.isnan(R[ii,jj]):
                            M[node1,tw_no][node2,tw_no] = R[ii,jj]
                        else:
                            if nanlogfile is not None:
                                with open(nanlogfile,'a+') as f:
                                    f.write('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
                            else:
                                print('NaN correlation at nodes '+node1+', '+node2+' at timewindow '+str(tw_no)+'\n')
                if calculate_consistency_while_clustering: #calculating spatial consistency of formed clusters
                    windowdata = imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]]
                    if '.' in consistency_save_path:
                        name,extension = consistency_save_path.rsplit('.',1)
                        consistency_save_path_final = name + '_' + str(tw_no) + '.' + extension
                    else:
                        # no extension: append the window number to the path itself
                        consistency_save_path_final = consistency_save_path + '_' + str(tw_no) + '.pkl'
                    calculate_spatial_consistency(windowdata,voxels_in_clusters,f_transform_consistency,n_consistency_CPUs,consistency_save_path_final)
            yield M
            del(M)
    elif method == 'consistency_optimized':
        voxels_in_clusters_by_timewindow = dict()
        for layerset in layersets:
            M = pn.MultilayerNetwork(aspects=1,fullyInterconnected=False)
            previous_voxels_in_clusters = dict()
            for tw_no in layerset:
                cfg = {'ROICentroids':ROI_centroids,'names':ROI_names,'imgdata':imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],
                       'threshold':consistency_threshold,'targetFunction':consistency_target_function,
                       'fTransform':f_transform_consistency,'nROIs':n_clusters,'template':template,
                       'percentageROIsForThresholding':consistency_percentage_ROIs_for_thresholding,'nCPUs':n_consistency_CPUs,
                       'nReHoNeighbors':n_ReHo_neighbors,'percentageMinCentroidDistance':percentage_min_centroid_distance,
                       'ReHoMeasure':ReHo_measure}
                if not tw_no in voxels_in_clusters_by_timewindow:
                    voxels_in_clusters = dict()
                    if ROI_centroids == 'random':
                        voxel_labels, voxel_coordinates = cbc.growOptimizedROIsInParallel(cfg,n_consistency_iters,n_consistency_CPUs)
                    else:
                        voxel_labels, voxel_coordinates, _ = cbc.growOptimizedROIs(cfg)
                    for ii, label in enumerate(voxel_labels):
                        voxels_in_clusters.setdefault(label,[]).append(voxel_coordinates[ii]) # voxels_in_clusters will contain label:[voxels with label] pairs; here, coordinates of each voxel are added to the correct list
                    if -1 in voxels_in_clusters:
                        del voxels_in_clusters[-1] # Voxels that are not located in any ROI (because of thresholding) have label -1. These should not be considered further in the pipeline.
                    voxels_in_clusters_by_timewindow[tw_no] = voxels_in_clusters
                else:
                    voxels_in_clusters = voxels_in_clusters_by_timewindow[tw_no]
                R = calculate_cluster_correlation_matrix(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],voxels_in_clusters)
                for ii in range(R.shape[0]):
                    node1 = str(voxels_in_clusters[ii])
                    for jj in range(ii+1,R.shape[1]):
                        node2 = str(voxels_in_clusters[jj])
                        if not np.isnan(R[ii,jj]):
                            M[node1,node2,tw_no] = R[ii,jj]
                        else:
                            if nanlogfile != None:
                                with open(nanlogfile,'a+') as f:
                                    f.write('NaN correlation at nodes ' + node1 + ', ' + node2 + ' at timewindow ' + str(tw_no) + '\n')
                            else:
                                print('NaN correlation at nodes ' + node1 + ', ' + node2 + ' at timewindow ' + str(tw_no) + '\n')
                for cluster_number in voxels_in_clusters:
                    for previous_cluster_number in previous_voxels_in_clusters:
                        # If previous_voxels_in_clusters is empty, this loop isn't performed at all
                        cluster_overlap = get_overlap(set(voxels_in_clusters[cluster_number]),set(previous_voxels_in_clusters[previous_cluster_number]))
                        M[str(previous_voxels_in_clusters[previous_cluster_number]),tw_no-1][str(voxels_in_clusters[cluster_number]),tw_no] = cluster_overlap
                previous_voxels_in_clusters = voxels_in_clusters
                if calculate_consistency_while_clustering:
                    windowdata = imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]]
                    if '.' in consistency_save_path:
                        name,extension = consistency_save_path.rsplit('.',1)
                        consistency_save_path_final = name + '_' + str(tw_no) + '.' + extension
                    else:
                        consistency_save_path_final = consistency_save_path + '_' + str(tw_no) + '.pkl'
                    calculate_spatial_consistency(windowdata,voxels_in_clusters,f_transform_consistency,n_consistency_CPUs,consistency_save_path_final)
            del(voxels_in_clusters_by_timewindow[min(voxels_in_clusters_by_timewindow)])
            yield M
            del(M)
    elif method=='random_balls':
        voxels_in_clusters_by_timewindow = dict()
        for layerset in layersets:
            M = pn.MultilayerNetwork(aspects=1,fullyInterconnected=False)
            previous_voxels_in_clusters = dict()
            for tw_no in layerset:
                if not tw_no in voxels_in_clusters_by_timewindow:
                    voxels_in_clusters = dict()
                    voxel_labels, voxel_coordinates, _ = cbc.growSphericalROIs('random',imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],nROIs=n_clusters,template=template)
                    for ii, label in enumerate(voxel_labels):
                        voxels_in_clusters.setdefault(label,[]).append(voxel_coordinates[ii]) # voxels_in_clusters will contain label:[voxels with label] pairs; here, coordinates of each voxel are added to the correct list
                    if -1 in voxels_in_clusters:
                        del voxels_in_clusters[-1] # Voxels that are not located in any ROI (because of thresholding) have label -1. These should not be considered further in the pipeline.
                    voxels_in_clusters_by_timewindow[tw_no] = voxels_in_clusters
                else:
                    voxels_in_clusters = voxels_in_clusters_by_timewindow[tw_no]
                R = calculate_cluster_correlation_matrix(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],voxels_in_clusters)
                for ii in range(R.shape[0]):
                    node1 = str(voxels_in_clusters[ii])
                    for jj in range(ii+1,R.shape[1]):
                        node2 = str(voxels_in_clusters[jj])
                        if not np.isnan(R[ii,jj]):
                            M[node1,node2,tw_no] = R[ii,jj]
                        else:
                            if nanlogfile != None:
                                with open(nanlogfile,'a+') as f:
                                    f.write('NaN correlation at nodes ' + node1 + ', ' + node2 + ' at timewindow ' + str(tw_no) + '\n')
                            else:
                                print('NaN correlation at nodes ' + node1 + ', ' + node2 + ' at timewindow ' + str(tw_no) + '\n')
                for cluster_number in voxels_in_clusters:
                    for previous_cluster_number in previous_voxels_in_clusters:
                        # If previous_voxels_in_clusters is empty, this loop isn't performed at all
                        cluster_overlap = get_overlap(set(voxels_in_clusters[cluster_number]),set(previous_voxels_in_clusters[previous_cluster_number]))
                        M[str(previous_voxels_in_clusters[previous_cluster_number]),tw_no-1][str(voxels_in_clusters[cluster_number]),tw_no] = cluster_overlap
                previous_voxels_in_clusters = voxels_in_clusters
                if calculate_consistency_while_clustering:
                    windowdata = imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]]
                    if '.' in consistency_save_path:
                        name,extension = consistency_save_path.rsplit('.',1)
                        consistency_save_path_final = name + '_' + str(tw_no) + '.' + extension
                    else:
                        consistency_save_path_final = consistency_save_path + '_' + str(tw_no) + '.pkl'
                    calculate_spatial_consistency(windowdata,voxels_in_clusters,f_transform_consistency,n_consistency_CPUs,consistency_save_path_final)
            del(voxels_in_clusters_by_timewindow[min(voxels_in_clusters_by_timewindow)])
            yield M
            del(M)
    elif method=='craddock':
        voxels_in_clusters_by_timewindow = dict()
        for layerset in layersets:
            M = pn.MultilayerNetwork(aspects=1,fullyInterconnected=False)
            previous_voxels_in_clusters = dict()
            for tw_no in layerset:
                cfg = {'imgdata':imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],'threshold':consistency_threshold,'nROIs':n_clusters}
                if not tw_no in voxels_in_clusters_by_timewindow:
                    voxels_in_clusters = dict()
                    voxel_labels,voxel_coordinates = cbc.spectralNCutClustering(cfg)
                    if min(np.abs(voxel_labels))>0: # if cluster indexing starts from a non-zero value, shift it so that it starts from zero
                        voxel_labels[voxel_labels > 0] -= np.min(voxel_labels[voxel_labels > 0])
                    for ii, label in enumerate(voxel_labels):
                        voxels_in_clusters.setdefault(label,[]).append(voxel_coordinates[ii]) # voxels_in_clusters will contain label:[voxels with label] pairs; here, coordinates of each voxel are added to the correct list
                    if -1 in voxels_in_clusters:
                        del voxels_in_clusters[-1] # Voxels that are not located in any ROI (because of thresholding) have label -1. These should not be considered further in the pipeline.
                    voxels_in_clusters_by_timewindow[tw_no] = voxels_in_clusters
                else:
                    voxels_in_clusters = voxels_in_clusters_by_timewindow[tw_no]
                R = calculate_cluster_correlation_matrix(imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]],voxels_in_clusters)
                for ii in range(R.shape[0]):
                    node1 = str(voxels_in_clusters[ii])
                    for jj in range(ii+1,R.shape[1]):
                        node2 = str(voxels_in_clusters[jj])
                        if not np.isnan(R[ii,jj]):
                            M[node1,node2,tw_no] = R[ii,jj]
                        else:
                            if nanlogfile != None:
                                with open(nanlogfile,'a+') as f:
                                    f.write('NaN correlation at nodes ' + node1 + ', ' + node2 + ' at timewindow ' + str(tw_no) + '\n')
                            else:
                                print('NaN correlation at nodes ' + node1 + ', ' + node2 + ' at timewindow ' + str(tw_no) + '\n')
                for cluster_number in voxels_in_clusters:
                    for previous_cluster_number in previous_voxels_in_clusters:
                        # If previous_voxels_in_clusters is empty, this loop isn't performed at all
                        cluster_overlap = get_overlap(set(voxels_in_clusters[cluster_number]),set(previous_voxels_in_clusters[previous_cluster_number]))
                        M[str(previous_voxels_in_clusters[previous_cluster_number]),tw_no-1][str(voxels_in_clusters[cluster_number]),tw_no] = cluster_overlap
                previous_voxels_in_clusters = voxels_in_clusters
                if calculate_consistency_while_clustering:
                    windowdata = imgdata[:,:,:,start_times[tw_no]:end_times[tw_no]]
                    if '.' in consistency_save_path:
                        name,extension = consistency_save_path.rsplit('.',1)
                        consistency_save_path_final = name + '_' + str(tw_no) + '.' + extension
                    else:
                        consistency_save_path_final = consistency_save_path + '_' + str(tw_no) + '.pkl'
                    calculate_spatial_consistency(windowdata,voxels_in_clusters,f_transform_consistency,n_consistency_CPUs,consistency_save_path_final)
            del(voxels_in_clusters_by_timewindow[min(voxels_in_clusters_by_timewindow)])
            yield M
            del(M)
    else:
        raise NotImplementedError('Not implemented')
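
# Hedged usage sketch (an illustration, not part of the original source):
# consume the generator one layerset at a time so that only a single network
# is held in memory. Parameter names follow the simpler variant of this
# generator shown later in this file; the extended variant above may take
# additional consistency-related arguments. gen_func, imgdata and template
# are hypothetical inputs supplied by the caller.
def _consume_layerset_networks(gen_func, imgdata, template):
    for M in gen_func(imgdata, layerset_size=2, timewindow=100, overlap=0,
                      n_clusters=10, method='template', template=template):
        yield len(list(M.iter_layers()))  # replace with real per-window processing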
Example #19
0
def sample_multilayer_subgraphs_esu(network,results,sizes=None,intersections=None,nnodes=None,nlayers=None,p=None,seed=None,intersection_type="strict",copy_network=True,custom_check_function=None):
    u"""A one-aspect multilayer version of the Rand-EnumerateSubgraphs (Rand-ESU) algorithm
    introduced by Wernicke [1].
    
    Uniformly samples induced subgraphs of the form [nodelist][layerlist] which
    fulfill the given requirements. Each subgraph is sampled with probability
    equal to the product of the entries of the parameter p. If all entries in
    p are 1, all such induced subgraphs in the network are found.
    
    Parameters
    ----------
    Multiple parameters can be given by the user; some are always required,
    some are required only in certain cases, and some are mutually exclusive.
    This function has several modes of operation, and the mode is chosen based
    on the parameters passed by the user. For a description of all modes, see
    the section Usage.
    
    network : MultilayerNetwork
        The multilayer network to be analyzed.
    results : list or callable
        The method of outputting the found induced subgraphs. If a list, then
        the induced subgraphs are appended to it as ([nodelist],[layerlist]) tuples.
        The results list or the [nodelist] or [layerlist] lists are not guaranteed to
        be in any specific order.
        If a callable, when an acceptable induced subgraph is found, this callable
        is called with the argument ([nodelist],[layerlist]) (that is, one argument
        which is a tuple of two lists). The callable should therefore take only one
        required parameter in the form of a tuple. If you want to pass more parameters
        to the callable, do so via e.g. an anonymous function.
    sizes : list of ints > 0
        How many nodes should be on each layer of an acceptable induced subgraph.
        One integer for each layer of an acceptable subgraph.
    intersections : list of ints >= 0 or Nones, or int
        How many nodes should be shared between sets of layers in an acceptable
        induced subgraph. If list, if an entry in the list is None, any number of shared
        nodes is accepted. The order of the intersections is taken to follow the
        order of layers in sizes, with two-layer intersections being listed first,
        then three-layer intersections, etc. If int, then this is taken to mean
        the intersection between ALL layers, and the other intersections can be anything.
        If this is an int with value x, it is equivalent to being a list with
        [None,None,...,None,x].
        For more details, see section "Constructing the requirements" in the documentation
        of the function default_check_reqs.
    nnodes : int
        How many nodes an acceptable subgraph should have. If not provided and
        sizes and intersections are provided, it
        will be calculated based on the sizes and intersections parameters.
        Required if there are Nones in intersections or if intersection_type
        is not "strict". If you cannot guarantee the correctness of this
        number, do not use this parameter.
    nlayers : int
        How many layers an acceptable subgraph should have. If not provided and
        sizes and intersections are provided,
        it will be calculated based on the sizes and intersections requirements.
        If you cannot guarantee the correctness of this number, do not use this
        parameter.
    p : list of floats 0 <= p <= 1
        List of sampling probabilities at each depth. If None, p = 1 for each
        depth is used. The probability of sampling a given induced subgraph is
        the product of the elements of p.
        It is up to the user to provide a p of correct length to
        match the depth at which desired induced subgraphs are found.
        If you know how many nodes and layers an acceptable induced subgraph should
        have (nnodes and nlayers, respectively), you can calculate the length of p by:
        len(p) = nnodes - 1 + nlayers - 1 + 1.
        This formula follows from the fact that finding an induced subgraph starts
        from a nodelayer (the + 1), and then each node and each layer have to be added
        one at a time to the nodelist and layerlist, respectively
        (nnodes - 1 and nlayers - 1, respectively). Starting from a nodelayer means
        that both nodelist and layerlist are of length 1 when the expansion of the
        subgraph is started, hence the - 1's.
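        For example, if an acceptable induced subgraph has nnodes = 3 and
        nlayers = 2, then len(p) = 3 - 1 + 2 - 1 + 1 = 4.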
    seed : int, str, bytes or bytearray
        Seed for Rand-ESU.
    intersection_type : string, "strict" or "less_or_equal"
        If intersection_type is "strict", all intersections must be exactly equal
        to entries in the intersections parameter. If intersection_type is
        "less_or_equal", an intersection is allowed to be less than or equal to the corresponding
        entry in the intersections parameter. Usage is case-sensitive.
    copy_network : boolean
        Determines whether the network is copied at the beginning of execution. If True (default),
        the network is copied and the copy is modified during the execution (the original
        network is not modified). If False, the network is not copied, and the network is
        NOT modified during the execution.
        The copying takes more memory but results in faster execution times - the default
        value (True) is the recommended setting. The modification of the copy does not affect
        the edges in the induced subgraphs that are passed to the check function. During the
        execution, if this parameter is True, as starting nodelayers are iterated through in their numerical order,
        after a nodelayer has been iterated over all edges leading to it are removed (at this
        point, it is impossible to reach the nodelayer from subsequent starting nodelayers in
        any case). Therefore, if you use a custom_check_function whose return value depends
        also on the edges OUTSIDE the induced subgraph to be tested, set this parameter to False.
    custom_check_function : callable or None
        If not None, this will be used to determine whether an induced subgraph is okay or not
        (instead of one of the built-in check functions).
        The algorithm finds induced subgraphs which have the given nnodes and nlayers, and which
        have a path spanning the induced subgraph (but are not necessarily connected). The algorithm
        then passes these to the check function, which determines whether the subgraph is acceptable
        or not. The arguments that are passed to your custom check function are the network, the nodelist
        of the induced subgraph, and the layerlist of the induced subgraph (three parameters, in this
        order). Your check function should therefore accept exactly three parameters. If you want to pass
        more parameters to the check function, do so via e.g. an anonymous function.
        If copy_network is True, the passed network is the copy of the network, which might have
        edges removed OUTSIDE of the induced subgraph (the edges inside the induced subgraph are identical
        to the original network's). The function should return True or False (the subgraph is acceptable
        or not acceptable, respectively). When this parameter is not None, you must also specify nnodes
        and nlayers.
        
    Usage
    -----
    There are multiple functionalities built-in, and determining which is used is
    done by checking which parameters have been given by the user.
    
    If you want to find induced subgraphs (ISs) that have a specified number of nodes
    on each layer and have specific intersections between layers, provide:
        - network
        - results
        - sizes
        - intersections as list without Nones
        
    If you want to find ISs that have a specific number of nodes on each layer and
    have some specific intersections between layers, and some intersections that can
    be of any cardinality, provide:
        - network
        - results
        - sizes
        - intersections as list with Nones in the elements where intersection cardinalities can be anything (even all elements can be Nones)
        - nnodes
        
    If you want to find ISs that have a specific number of nodes on each layer and
    have intersections that have at most specific cardinalities, provide:
        - network
        - results
        - sizes
        - intersections as list without Nones
        - nnodes
        - intersection_type = "less_or_equal"
        
    If you want to find ISs that have a specific number of nodes on each layer and
    have some specific intersections that have at most specific cardinalities, and some
    intersections that can be of any cardinality, provide:
        - network
        - results
        - sizes
        - intersections as list with Nones in the elements where intersection cardinalities can be anything (even all intersections can be Nones)
        - nnodes
        - intersection_type = "less_or_equal"
        
    If you want to find ISs that have a specific number of nodes on each layer and
    have a specific intersection between ALL layers (the other intersections can be anything),
    provide:
        - network
        - results
        - sizes
        - nnodes
        - intersections as int
        
    If you want to find ISs that have a specific number of nodes and a specific number
    of layers, provide:
        - network
        - results
        - nnodes
        - nlayers
        
    If you want to define your own function to determine when an IS is acceptable,
    provide:
        - network
        - results
        - nnodes
        - nlayers
        - custom_check_function
    
    For all of the above uses, if you don't want to find all ISs but only sample a
    portion of them, also provide:
        - p
    
    Of the above uses, the first five use the default_check_reqs function for checking
    subgraph validity, the sixth uses the relaxed_check_reqs function, and the seventh
    uses the user-supplied checking function.
    
    Example
    -------
    Suppose we have the multilayer network N:
    
    (1,'X')----(2,'X')----(3,'X')
                  |
                  |
               (2,'Y')
    
    where (a,b) are nodelayer tuples with a = node identity and b = layer identity.
    After calling
    
    >>> results = []
    >>> sample_multilayer_subgraphs_esu(N,results,[2,1],[1])
    
    the results list looks like [([1,2],['X','Y']),([2,3],['X','Y'])] (or some other
    order of tuples and [nodelist] and [layerlist] inside the tuples, since the output
    is not guaranteed to be in any specific order).
    
    After calling
    
    >>> results = []
    >>> sample_multilayer_subgraphs_esu(N,results,nnodes=3,nlayers=1)
    
    the results list looks like [([1,2,3],['X'])] (or again, some other ordering).
    
    Further reading
    ---------------
    The documentation of the functions default_check_reqs, default_calculate_required_lengths
    and relaxed_check_reqs offer more insight into what are considered acceptable induced subgraphs
    in different cases in the functionalities described in the Usage section. You should read these
    if you are not sure what you want to do or how to do it after reading this documentation.
    
    References
    ----------
    [1] "A Faster Algorithm for Detecting Network Motifs", S. Wernicke, WABI. Vol. 3692, pp. 165-177. Springer 2005.
    """
    if copy_network == True:
        network_copy = pymnet.subnet(network,network.get_layers(aspect=0),network.get_layers(aspect=1),newNet=pymnet.MultilayerNetwork(aspects=1,fullyInterconnected=False))
    else:
        network_copy = network
        
    if seed == None:
        random.seed()
    else:
        random.seed(seed)

    check_function = None
    assert (sizes != None and intersections != None) or (nnodes != None and nlayers != None), "Please provide either sizes and intersections or nnodes and nlayers"
    if custom_check_function != None:
        assert nnodes != None and nlayers != None, "Please provide nnodes and nlayers when using a custom check function"
        req_nodelist_len = nnodes
        req_layerlist_len = nlayers
        check_function = custom_check_function
    if sizes != None and intersections != None and check_function == None:
        if isinstance(intersections,list):
            if None in intersections:
                assert nnodes != None, "Please provide nnodes if including Nones in intersections"
                req_nodelist_len = nnodes
                req_layerlist_len = len(sizes)
            else:
                if intersection_type == "strict":
                    assert nnodes == None and nlayers == None, "You cannot provide both sizes and intersections and nnodes and nlayers, if intersections is a list"
                    req_nodelist_len, req_layerlist_len = default_calculate_required_lengths(sizes,intersections)
                elif intersection_type == "less_or_equal":
                    assert nnodes != None and nlayers == None, "please provide nnodes (and not nlayers) if using less_or_equal intersection type"
                    req_nodelist_len = nnodes
                    req_layerlist_len = len(sizes)
            check_function = lambda x,y,z: default_check_reqs(x,y,z,sizes,intersections,req_nodelist_len,req_layerlist_len,intersection_type)
        elif isinstance(intersections,int):
            assert intersections >= 0, "Please provide nonnegative common intersection size"
            assert nnodes != None and nlayers == None, "When requiring only common intersection size, please provide nnodes (and not nlayers)"
            req_nodelist_len = nnodes
            req_layerlist_len = len(sizes)
            intersections_as_list = [None]*(2**len(sizes)-len(sizes)-1)
            intersections_as_list[-1] = intersections
            check_function = lambda x,y,z: default_check_reqs(x,y,z,sizes,intersections_as_list,req_nodelist_len,req_layerlist_len,intersection_type)
    if nnodes != None and nlayers != None and check_function == None:
        assert sizes == None and intersections == None, "Please provide either sizes and intersections or nnodes and nlayers, not both"
        req_nodelist_len = nnodes
        req_layerlist_len = nlayers
        assert isinstance(req_nodelist_len,int) and isinstance(req_layerlist_len,int), "Non-integer nnodes or nlayers"
        assert req_nodelist_len > 0 and req_layerlist_len > 0, "Nonpositive nnodes or nlayers"
        check_function = relaxed_check_reqs
    assert check_function != None, "Please specify a valid combination of parameters to determine method of subgraph validity checking"
        
    if p == None:
        p = [1] * (req_nodelist_len-1 + req_layerlist_len-1 + 1)
        
    depth = 0
    numberings = dict()
    inverse_numberings = dict()
    for index,nodelayer in enumerate(network_copy.iter_node_layers()):
        numberings[nodelayer] = index
    for nodelayer in numberings:
        inverse_numberings[numberings[nodelayer]] = nodelayer

    for indexnumber in range(len(numberings)):
        v = inverse_numberings[indexnumber]
        if random.random() < p[depth]:
            start_node = v[0]
            start_layer = v[1]
            V_extension_nodes = set()
            V_extension_layers = set()
            for neighbor in network_copy[v]:
                if numberings[neighbor] > numberings[v]:
                    no_node_conflicts = True
                    no_layer_conflicts = True
                    node = neighbor[0]
                    layer = neighbor[1]
                    if (node,start_layer) in numberings and numberings[(node,start_layer)] < numberings[v]:
                        no_node_conflicts = False
                    if (start_node,layer) in numberings and numberings[(start_node,layer)] < numberings[v]:
                        no_layer_conflicts = False
                    if (node != start_node
                        and no_node_conflicts
                        and node not in V_extension_nodes):
                        V_extension_nodes.add(node)
                    if (layer != start_layer
                        and no_layer_conflicts
                        and layer not in V_extension_layers):
                        V_extension_layers.add(layer)
            _extend_subgraph(network_copy,[start_node],[start_layer],check_function,V_extension_nodes,V_extension_layers,numberings,v,req_nodelist_len,req_layerlist_len,depth+1,p,results)
        if copy_network == True:
            for neighbor in list(network_copy[v]):
                network_copy[neighbor][v] = 0
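
# Hedged sketch reproducing the docstring's Example above (an illustration,
# not part of the original source): build the small network N and sample
# induced subgraphs with layer sizes [2, 1] and intersection [1].
def _esu_docstring_example():
    N = pymnet.MultilayerNetwork(aspects=1, fullyInterconnected=False)
    N[1, 'X'][2, 'X'] = 1
    N[2, 'X'][3, 'X'] = 1
    N[2, 'X'][2, 'Y'] = 1
    results = []
    sample_multilayer_subgraphs_esu(N, results, sizes=[2, 1], intersections=[1])
    # expected, in some order: ([1, 2], ['X', 'Y']) and ([2, 3], ['X', 'Y'])
    return results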
Example #20
0
class test_network_io(unittest.TestCase):

    timeseries0 = np.array(range(-10, 11), dtype=np.float64)
    timeseries1 = np.array([
        -0.46362334, -0.56982772, 0.06455791, -0.44209878, -0.31068497,
        0.05360425, -0.41299186, -0.29082169, -0.07190158, -0.12474256,
        -0.24997589, 0.01267206, 0.03601663, 0.29330202, -0.12646342,
        0.13130587, 0.57496159, 0.77851974, 0.12816724, 0.63563011, 0.35058168
    ],
                           dtype=np.float64)
    timeseries2 = np.array(range(100, 121), dtype=np.float64)
    timeseries3 = np.zeros((21, ))
    timeseries4 = np.array(
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1],
        dtype=np.float64)
    timeseries5 = np.array(range(0, -21, -1))

    timeseries6 = np.copy(timeseries0)
    timeseries7 = np.copy(timeseries1)
    timeseries8 = np.copy(timeseries2)
    timeseries9 = np.copy(timeseries3)
    timeseries10 = np.copy(timeseries4)
    timeseries11 = np.copy(timeseries5)

    imgdata = np.block([[[[timeseries0], [timeseries1]],
                         [[timeseries2], [timeseries3]],
                         [[timeseries4], [timeseries5]]],
                        [[[timeseries6], [timeseries7]],
                         [[timeseries8], [timeseries9]],
                         [[timeseries10], [timeseries11]]]])

    pickle_test_mplex = pn.MultiplexNetwork(couplings='ordinal',
                                            fullyInterconnected=True)
    pickle_test_mplex['(1, 2, 3)', 0]['(2, 3, 4)', 0] = 0.5
    pickle_test_mplex['(2, 3, 4)', 1]['(3, 4, 5)', 1] = 0.999
    pickle_test_mplex['(1, 2, 3)', 1]['(2, 3, 4)', 1] = 0.001
    pickle_test_mplex['(1, 2, 3)', 1]['(3, 4, 5)', 1] = 0.234
    pickle_test_mplex['(3, 4, 5)', 2]['(1, 2, 3)', 2] = 1

    pickle_test_mlayer = pn.MultilayerNetwork(aspects=1,
                                              fullyInterconnected=False)
    pickle_test_mlayer['[(1, 2, 3),(2, 3, 4)]', 9]['[(3, 4, 5)]', 9] = 0.123
    pickle_test_mlayer['[(1, 2, 3)]', 10]['[(2, 3, 4),(3, 4, 5)]', 10] = 0.456
    pickle_test_mlayer['[(1, 2, 3),(2, 3, 4)]', 9]['[(1, 2, 3)]', 10] = 0.5
    pickle_test_mlayer['[(1, 2, 3),(2, 3, 4)]', 9]['[(2, 3, 4),(3, 4, 5)]',
                                                   10] = 0.333
    pickle_test_mlayer['[(3, 4, 5)]', 9]['[(1, 2, 3)]', 10] = 0
    pickle_test_mlayer['[(3, 4, 5)]', 9]['[(2, 3, 4),(3, 4, 5)]', 10] = 0.5
    pickle_test_mlayer['[(4,5,6)]', 10]['[(2, 3, 4),(3, 4, 5)]', 10] = 0.01
    pickle_test_mlayer['[(2, 3, 4)]', 11]['[(2, 3, 4),(3, 4, 5)]', 10] = 0.999

    def round_edge_weights(self, M):
        # rounds edge weights to 10 decimals
        if isinstance(M, pn.net.MultiplexNetwork):
            for edge in M.edges:
                if edge[2] == edge[3]:
                    M[edge[0], edge[2]][edge[1], edge[3]] = round(edge[4], 10)
            return M
        else:
            for edge in M.edges:
                M[edge[0], edge[2]][edge[1], edge[3]] = round(edge[4], 10)
            return M

    def test_write_weighted_network(self):
        M_multiplex = pn.MultiplexNetwork(couplings='ordinal',
                                          fullyInterconnected=True)
        M_multiplex['(1, 2, 3)', 0]['(2, 3, 4)', 0] = 0.5
        M_multiplex['(2, 3, 4)', 1]['(3, 4, 5)', 1] = 0.999
        M_multiplex['(3, 4, 5)', 2]['(1, 2, 3)', 2] = 1
        possible_nodelines = set([
            '(1, 2, 3);(2, 3, 4);(3, 4, 5)\n',
            '(1, 2, 3);(3, 4, 5);(2, 3, 4)\n',
            '(2, 3, 4);(1, 2, 3);(3, 4, 5)\n',
            '(2, 3, 4);(3, 4, 5);(1, 2, 3)\n',
            '(3, 4, 5);(1, 2, 3);(2, 3, 4)\n',
            '(3, 4, 5);(2, 3, 4);(1, 2, 3)\n'
        ])
        possible_layerlines = set(
            ['0;1;2\n', '0;2;1\n', '1;0;2\n', '1;2;0\n', '2;0;1\n', '2;1;0\n'])
        edgeset = set([
            '(1, 2, 3);(2, 3, 4);0;0;0.5\n', '(1, 2, 3);(1, 2, 3);0;1;1.0\n',
            '(1, 2, 3);(1, 2, 3);1;2;1.0\n', '(1, 2, 3);(3, 4, 5);2;2;1\n',
            '(2, 3, 4);(2, 3, 4);0;1;1.0\n', '(2, 3, 4);(3, 4, 5);1;1;0.999\n',
            '(2, 3, 4);(2, 3, 4);1;2;1.0\n', '(3, 4, 5);(3, 4, 5);0;1;1.0\n',
            '(3, 4, 5);(3, 4, 5);1;2;1.0\n'
        ])
        network_io.write_weighted_network(
            M_multiplex, 'test_for_network_writing_WILL_BE_REMOVED.txt',
            'Created by test_write_weighted_network')
        try:
            with open('test_for_network_writing_WILL_BE_REMOVED.txt',
                      'r') as f:
                self.assertEqual(f.readline(), '# Multiplex\n')
                self.assertEqual(f.readline(),
                                 '# Created by test_write_weighted_network\n')
                self.assertEqual(f.readline(), 'nodes:\n')
                self.assertTrue(f.readline() in possible_nodelines)
                self.assertEqual(f.readline(), 'layers:\n')
                self.assertTrue(f.readline() in possible_layerlines)
                self.assertEqual(f.readline(), 'edges:\n')
                for line in f:
                    self.assertTrue(line in edgeset)
                    edgeset.remove(line)
                self.assertEqual(len(edgeset), 0)
                self.assertEqual(f.readline(), '')
        finally:
            os.remove('test_for_network_writing_WILL_BE_REMOVED.txt')

        M_multilayer = pn.MultilayerNetwork(aspects=1,
                                            fullyInterconnected=False)
        M_multilayer['[(1, 2, 3),(2, 3, 4)]', 0]['[(3, 4, 5)]', 0] = 0.123
        M_multilayer['[(1, 2, 3)]', 1]['[(2, 3, 4),(3, 4, 5)]', 1] = 0.456
        M_multilayer['[(1, 2, 3),(2, 3, 4)]', 0]['[(1, 2, 3)]', 1] = 0.5
        M_multilayer['[(1, 2, 3),(2, 3, 4)]', 0]['[(2, 3, 4),(3, 4, 5)]',
                                                 1] = 0.333
        M_multilayer['[(3, 4, 5)]', 0]['[(1, 2, 3)]', 1] = 0
        M_multilayer['[(3, 4, 5)]', 0]['[(2, 3, 4),(3, 4, 5)]', 1] = 0.5
        possible_nodelines = set([
            x[0] + ';' + x[1] + ';' + x[2] + ';' + x[3] + '\n'
            for x in itertools.permutations([
                '[(1, 2, 3)]', '[(1, 2, 3),(2, 3, 4)]',
                '[(2, 3, 4),(3, 4, 5)]', '[(3, 4, 5)]'
            ])
        ])
        possible_layerlines = set(['0;1\n', '1;0\n'])
        edgeset = set([
            '[(1, 2, 3),(2, 3, 4)];[(2, 3, 4),(3, 4, 5)];0;1;0.333\n',
            '[(1, 2, 3),(2, 3, 4)];[(3, 4, 5)];0;0;0.123\n',
            '[(1, 2, 3),(2, 3, 4)];[(1, 2, 3)];0;1;0.5\n',
            '[(3, 4, 5)];[(2, 3, 4),(3, 4, 5)];0;1;0.5\n',
            '[(1, 2, 3)];[(2, 3, 4),(3, 4, 5)];1;1;0.456\n'
        ])
        network_io.write_weighted_network(
            M_multilayer, 'test_for_network_writing_WILL_BE_REMOVED.txt',
            'Created by test_write_weighted_network')
        try:
            with open('test_for_network_writing_WILL_BE_REMOVED.txt',
                      'r') as f:
                self.assertEqual(f.readline(), '# Multilayer\n')
                self.assertEqual(f.readline(),
                                 '# Created by test_write_weighted_network\n')
                self.assertEqual(f.readline(), 'nodes:\n')
                self.assertTrue(f.readline() in possible_nodelines)
                self.assertEqual(f.readline(), 'layers:\n')
                self.assertTrue(f.readline() in possible_layerlines)
                self.assertEqual(f.readline(), 'edges:\n')
                for line in f:
                    self.assertTrue(line in edgeset)
                    edgeset.remove(line)
                self.assertEqual(len(edgeset), 0)
                self.assertEqual(f.readline(), '')
        finally:
            os.remove('test_for_network_writing_WILL_BE_REMOVED.txt')

    def test_read_weighted_network(self):
        M_multiplex = network_construction.make_multiplex(self.imgdata,
                                                          timewindow=7,
                                                          overlap=2)
        network_io.write_weighted_network(
            M_multiplex, 'test_for_network_reading_WILL_BE_REMOVED.txt',
            'Created by test_read_weighted_network')
        M_multiplex_read = network_io.read_weighted_network(
            'test_for_network_reading_WILL_BE_REMOVED.txt')
        try:
            self.assertEqual(self.round_edge_weights(M_multiplex),
                             self.round_edge_weights(M_multiplex_read))
        finally:
            os.remove('test_for_network_reading_WILL_BE_REMOVED.txt')

        M_multilayer = network_construction.make_clustered_multilayer(
            self.imgdata, timewindow=7, overlap=2, n_clusters=3)
        network_io.write_weighted_network(
            M_multilayer, 'test_for_network_reading_WILL_BE_REMOVED.txt',
            'Created by test_read_weighted_network')
        M_multilayer_read = network_io.read_weighted_network(
            'test_for_network_reading_WILL_BE_REMOVED.txt')
        try:
            self.assertEqual(self.round_edge_weights(M_multilayer),
                             self.round_edge_weights(M_multilayer_read))
        finally:
            os.remove('test_for_network_reading_WILL_BE_REMOVED.txt')

    def test_pickle_file_io_for_networks(self):
        try:
            network_io.write_pickle_file(
                self.pickle_test_mplex,
                'test_for_pickle_io_mplex_network_WILL_BE_REMOVED.pkl')
            network_io.write_pickle_file(
                self.pickle_test_mlayer,
                'test_for_pickle_io_mlayer_network_WILL_BE_REMOVED.pkl')
            pickle_test_mplex_read = network_io.read_pickle_file(
                'test_for_pickle_io_mplex_network_WILL_BE_REMOVED.pkl')
            pickle_test_mlayer_read = network_io.read_pickle_file(
                'test_for_pickle_io_mlayer_network_WILL_BE_REMOVED.pkl')
            self.assertEqual(self.pickle_test_mplex, pickle_test_mplex_read)
            self.assertEqual(self.pickle_test_mlayer, pickle_test_mlayer_read)
        finally:
            if os.path.exists(
                    'test_for_pickle_io_mplex_network_WILL_BE_REMOVED.pkl'):
                os.remove(
                    'test_for_pickle_io_mplex_network_WILL_BE_REMOVED.pkl')
            if os.path.exists(
                    'test_for_pickle_io_mlayer_network_WILL_BE_REMOVED.pkl'):
                os.remove(
                    'test_for_pickle_io_mlayer_network_WILL_BE_REMOVED.pkl')

    def test_write_layersetwise_network(self):
        try:
            os.mkdir('dir_for_test_write_layersetwise_network_WILL_BE_REMOVED')
            network_io.write_layersetwise_network(
                self.pickle_test_mplex,
                'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED')
            network_io.write_layersetwise_network(
                self.pickle_test_mlayer,
                'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED')
            self.assertEqual(
                sorted(
                    os.listdir(
                        'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED'
                    )), ['0_1_2', '9_10_11'])
            mplex = network_io.read_pickle_file(
                'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED/0_1_2'
            )
            mlayer = network_io.read_pickle_file(
                'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED/9_10_11'
            )
            self.assertEqual(self.pickle_test_mplex, mplex)
            self.assertEqual(self.pickle_test_mlayer, mlayer)
        finally:
            if os.path.exists(
                    'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED/0_1_2'
            ):
                os.remove(
                    'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED/0_1_2'
                )
            if os.path.exists(
                    'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED/9_10_11'
            ):
                os.remove(
                    'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED/9_10_11'
                )
            if os.path.exists(
                    'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED'):
                os.rmdir(
                    'dir_for_test_write_layersetwise_network_WILL_BE_REMOVED')
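
# Hedged convenience sketch (an assumption, not part of the original source):
# allows running this test case directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()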
Example #21
0
def relabel(net,nodeNames=None,layerNames=None):
    """Returns a copy of the network with nodes and layers relabeled.
    
     Parameters
     ----------
     net : MultilayerNetwork, or MultiplexNetwork 
        The original network.
     nodeNames : None, or dict
        The map from node names to node indices.
     layerNames : None, dict, or sequence of dicts
        The map(s) from (elementary) layer names to (elementary) layer indices.
        Note that you can add empty dicts for aspects you do not want to relabel.

     Returns
     -------
     newnet : type(net)
         The relabeled network.
    """
    def dget(d,e):
        if e in d:
            return d[e]
        else:
            return e

    def layer_to_indexlayer(layer,layerNames):
        return tuple([dget(layerNames[i],elayer) for i,elayer in enumerate(layer)])

    if nodeNames==None:
        nodeNames={}

    if layerNames==None:
        layerNames=[]

    if net.aspects==1:
        if isinstance(layerNames,dict):
            layerNames=[layerNames]

    for aspect in range(net.aspects):
        if len(layerNames)<aspect+1:
            layerNames.append({})
     
    if type(net)==netmodule.MultilayerNetwork:
        newNet=netmodule.MultilayerNetwork(aspects=net.aspects,
                                 noEdge=net.noEdge,
                                 directed=net.directed,
                                 fullyInterconnected=net.fullyInterconnected)
    elif type(net)==netmodule.MultiplexNetwork:
        newNet=netmodule.MultiplexNetwork(couplings=net.couplings,
                                directed=net.directed,
                                noEdge=net.noEdge,
                                fullyInterconnected=net.fullyInterconnected)
    else:
        raise Exception("Invalid type of net",type(net))

    for node in net:
        newNet.add_node(dget(nodeNames,node))
    for aspect in range(net.aspects):
        for layer in net.slices[aspect+1]:
            newNet.add_layer(dget(layerNames[aspect],layer),aspect=aspect+1) 

    if not net.fullyInterconnected:
        for nodelayer in net.iter_node_layers():
            layer=layer_to_indexlayer(nodelayer[1:],layerNames)
            if net.aspects==1:
                layer=layer[0]
            newNet.add_node(dget(nodeNames,nodelayer[0]),layer=layer)

    if type(net)==netmodule.MultilayerNetwork:
        for edge in net.edges:
            newedge=[dget(nodeNames,edge[0]),dget(nodeNames,edge[1])]
            for aspect in range(net.aspects):
                newedge.append(dget(layerNames[aspect],edge[2+aspect*2]))
                newedge.append(dget(layerNames[aspect],edge[2+aspect*2+1]))
            newNet[tuple(newedge)]=edge[-1]
    elif type(net)==netmodule.MultiplexNetwork:
            for layer in net.iter_layers():
                if net.aspects==1:
                    layertuple=(layer,)
                else:
                    layertuple=layer
                for node in net.A[layer]:
                    for neigh in net.A[layer][node]:
                        newNet[(dget(nodeNames,node),dget(nodeNames,neigh))+layer_to_indexlayer(layertuple,layerNames)]=net[(node,neigh)+layertuple]

                            
    return newNet
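
# Hedged usage sketch (an illustration, not part of the original source):
# relabel node 1 to 'a' and layer 'X' to 0 in a one-aspect network.
# netmodule is the module-internal alias used by relabel above.
def _relabel_example():
    net = netmodule.MultilayerNetwork(aspects=1)
    net[1, 2, 'X', 'X'] = 1
    newnet = relabel(net, nodeNames={1: 'a'}, layerNames={'X': 0})
    return newnet['a', 2, 0, 0]  # expected weight: 1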
Example #22
0
def yield_clustered_multilayer_network_in_layersets(imgdata,
                                                    layerset_size,
                                                    timewindow=100,
                                                    overlap=0,
                                                    n_clusters=100,
                                                    method='sklearn',
                                                    template=None,
                                                    nanlogfile=None,
                                                    event_time_stamps=None):
    # If event_time_stamps is specified, then they are used to compute start_times, end_times and k (and timewindow and overlap are ignored).
    # Otherwise, timewindow and overlap are used to compute start_times, end_times and k.
    if event_time_stamps == None:
        k = get_number_of_layers(imgdata.shape, timewindow, overlap)
        start_times, end_times = get_start_and_end_times(
            k, timewindow, overlap)
    else:
        assert isinstance(event_time_stamps, list)
        k = len(event_time_stamps) + 1
        start_times = [0] + event_time_stamps
        end_times = event_time_stamps + [imgdata.shape[3]]
    layersets = zip(*(range(k)[ii:] for ii in range(layerset_size)))

    if method == 'sklearn':
        voxels_in_clusters_by_timewindow = dict()
        for layerset in layersets:
            M = pn.MultilayerNetwork(aspects=1, fullyInterconnected=False)
            previous_voxels_in_clusters = dict()
            for tw_no in layerset:
                if not tw_no in voxels_in_clusters_by_timewindow:
                    voxels_in_clusters = dict()
                    model, voxellist = clustering.cluster_timewindow_scikit(
                        imgdata[:, :, :, start_times[tw_no]:end_times[tw_no]],
                        n_clusters=n_clusters)
                    for ii, label in enumerate(model.labels_):
                        voxels_in_clusters.setdefault(label,
                                                      []).append(voxellist[ii])
                    voxels_in_clusters_by_timewindow[
                        tw_no] = voxels_in_clusters
                else:
                    voxels_in_clusters = voxels_in_clusters_by_timewindow[
                        tw_no]
                R = calculate_cluster_correlation_matrix(
                    imgdata[:, :, :, start_times[tw_no]:end_times[tw_no]],
                    voxels_in_clusters)
                for ii in range(R.shape[0]):
                    node1 = str(voxels_in_clusters[ii])
                    for jj in range(ii + 1, R.shape[1]):
                        node2 = str(voxels_in_clusters[jj])
                        if not np.isnan(R[ii, jj]):
                            M[node1, tw_no][node2, tw_no] = R[ii, jj]
                        else:
                            if nanlogfile != None:
                                with open(nanlogfile, 'a+') as f:
                                    f.write('NaN correlation at nodes ' +
                                            node1 + ', ' + node2 +
                                            ' at timewindow ' + str(tw_no) +
                                            '\n')
                            else:
                                print('NaN correlation at nodes ' + node1 +
                                      ', ' + node2 + ' at timewindow ' +
                                      str(tw_no) + '\n')
                for cluster_number in voxels_in_clusters:
                    for previous_cluster_number in previous_voxels_in_clusters:
                        cluster_overlap = get_overlap(
                            set(voxels_in_clusters[cluster_number]),
                            set(previous_voxels_in_clusters[
                                previous_cluster_number]))
                        M[str(previous_voxels_in_clusters[
                            previous_cluster_number]),
                          tw_no - 1][str(voxels_in_clusters[cluster_number]),
                                     tw_no] = cluster_overlap
                previous_voxels_in_clusters = voxels_in_clusters  # reference to the same object
            del (voxels_in_clusters_by_timewindow[min(
                voxels_in_clusters_by_timewindow)])
            yield M
            del (M)

    elif method == 'template':
        voxels_in_clusters = get_voxels_in_clusters(template)
        for layerset in layersets:
            M = pn.MultiplexNetwork(couplings='ordinal',
                                    fullyInterconnected=True)
            for tw_no in layerset:
                R = calculate_cluster_correlation_matrix(
                    imgdata[:, :, :, start_times[tw_no]:end_times[tw_no]],
                    voxels_in_clusters)
                for ii in range(R.shape[0]):
                    node1 = str(voxels_in_clusters[ii])
                    for jj in range(ii + 1, R.shape[1]):
                        node2 = str(voxels_in_clusters[jj])
                        if not np.isnan(R[ii, jj]):
                            M[node1, tw_no][node2, tw_no] = R[ii, jj]
                        else:
                            if nanlogfile != None:
                                with open(nanlogfile, 'a+') as f:
                                    f.write('NaN correlation at nodes ' +
                                            node1 + ', ' + node2 +
                                            ' at timewindow ' + str(tw_no) +
                                            '\n')
                            else:
                                print('NaN correlation at nodes ' + node1 +
                                      ', ' + node2 + ' at timewindow ' +
                                      str(tw_no) + '\n')
            yield M
            del (M)
    else:
        raise NotImplementedError('Not implemented')
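
# Hedged illustration (not part of the original source) of the sliding
# layerset construction used above: zipping shifted ranges yields consecutive
# overlapping windows of layer indices.
def _layerset_window_demo(k=5, layerset_size=3):
    # e.g. k=5, layerset_size=3 -> [(0, 1, 2), (1, 2, 3), (2, 3, 4)]
    return list(zip(*(range(k)[ii:] for ii in range(layerset_size))))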
Example #23
0
    "hsa.mir.511", "hsa.mir.378c", "hsa.mir.206", "hsa.mir.103a.1",
    "hsa.mir.1247", "hsa.mir.1185.2", "hsa.mir.3174", "hsa.mir.1295a",
    "hsa.mir.141", "hsa.mir.155", "hsa.mir.493", "hsa.mir.215"
]

protein_name = [
    "Bax", "GSK3.alpha.beta", "E.Cadherin", "Rab11", "Caveolin.1",
    "Collagen_VI", "c.Myc", "PKC.alpha", "GAPDH", "P.Cadherin", "PDK1",
    "XBP1.G.C", "CDK1", "c.Kit", "Syk", "p27", "Shc_pY317", "mTOR", "AR",
    "Ku80", "Rictor", "MSH6", "14.3.3_zeta", "PI3K.p85", "Chk2_pT68",
    "Bap1.c.4", "Lck", "STAT3_pY705", "S6_pS240_S244", "ATM", "Src_pY416",
    "14.3.3_epsilon", "Bak", "N.Ras", "Myosin.IIa_pS1943", "LKB1", "YB.1",
    "p70S6K", "Claudin.7", "Annexin.1", "4E.BP1_pT37_T46", "MIG.6", "FOXO3a"
]

net = pymnet.MultilayerNetwork(aspects=1, fullyInterconnected=False)


#---------------------------------------------------------------------------------------------------------------------
# get line
def isGeneRnaProteinlist(name):
    if name in snp_name: return name, 'SNP'
    if name in gene_name: return name, 'Gene'
    if name in mirna_name: return name, 'miRNA'
    if name in protein_name: return name, 'Protein'


MIC06 = open('MIC0.6.csv', 'r')
for line in islice(MIC06, 1, None):  # ignore 1st line
    name1 = line.split(',')[0]
    name2 = line.split(',')[1]
Example #24
0
    },
)

pars = {
    'pop_size': pop_size,  # start with a small pool
    'pop_type': pop_type,  # synthpops, hybrid
    'contacts': contacts[pop_type],
    'n_days': 1,
    # 'rand_seed': None,
}

# Create sim
sim = cv.Sim(pars=pars)
sim.initialize()

mnet = pymnet.MultilayerNetwork(aspects=1)

# fig = pl.figure(figsize=(16,16), dpi=120)
mapping = dict(a='All', h='Households', s='Schools', w='Work', c='Community')
colors = sc.vectocolor(sim.people.age, cmap='turbo')

keys = list(contacts[pop_type].keys())
keys.remove('c')
# nrowcol = np.ceil(np.sqrt(len(keys)))

G = nx.MultiGraph()

node_set = set()
home_set = set()
school_set = set()
work_set = set()
Example #25
0
def aggregate(net,aspects,newNet=None,selfEdges=False):
    """Reduces the number of aspects by aggregating them.

    This function aggregates edges from multilayer aspects together
    by summing their weights. Any number of aspects is allowed, and the
    network can have non-diagonal inter-layer links. The layers cannot
    be weighted, i.e. every layer contributes to the summed weights with
    an equal coefficient.

    Note that no self-links are created and all the inter-layer links
    are disregarded.

    Parameters
    ----------
    net : MultilayerNetwork
       The original network.
    aspects : int or tuple 
       The aspect which is aggregated over, or a tuple of aspects if
       several aspects are aggregated at once.
    newNet : MultilayerNetwork 
       Empty network to be filled and returned. If None, a new one is 
       created by this function.
    selfEdges : bool 
       If true aggregates self-edges too

    Returns
    -------
    newNet : MultilayerNetwork
       The aggregated network (the filled-in newNet if one was provided,
       otherwise a newly created network).

    Examples
    --------
    Aggregating the network with a single aspect can be done as follows:

    >>> n=net.MultiplexNetwork([('categorical',1.0)])
    >>> an=transforms.aggregate(n,1)

    You need to choose which aspect(s) to aggregate over if the network 
    has multiple aspects:

    >>> n=MultiplexNetwork([2*('categorical',1.0)])
    >>> an1=transforms.aggregate(n,1)
    >>> an2=transforms.aggregate(n,2)
    >>> an12=transforms.aggregate(n,(1,2))
    """
    try:
        aspects=int(aspects)
        aspects=(aspects,)
    except TypeError:
        pass
    
    if newNet==None:
        newNet=netmodule.MultilayerNetwork(aspects=net.aspects-len(aspects),
                                 noEdge=net.noEdge,
                                 directed=net.directed,
                                 fullyInterconnected=net.fullyInterconnected)
    assert newNet.aspects==net.aspects-len(aspects)
    for d in aspects:
        assert 0<d<=(net.aspects+1)

    #Add nodes
    for node in net:
        newNet.add_node(node)
    
    #Add edges
    edgeIndices=list(filter(lambda x:math.floor(x/2) not in aspects,range(2*(net.aspects+1))))
    for edge in net.edges:
        newEdge=[]
        for index in edgeIndices:
            newEdge.append(edge[index])
        if selfEdges or not newEdge[0::2]==newEdge[1::2]:
            newNet[tuple(newEdge)]=newNet[tuple(newEdge)]+edge[-1]

    #Add node-layer tuples (if not node-aligned)
    if not net.fullyInterconnected and newNet.aspects>0:
        nodeIndices=list(filter(lambda x:x not in aspects,range(1,net.aspects+1)))
        for nlt in net.iter_node_layers():
            newlayer=[]
            for a in nodeIndices:
                newlayer.append(nlt[a])
            #we need to use the public interface for adding nodes which means that
            #layers are only given as tuples for multi-aspect networks
            if len(newlayer)==1: 
                newNet.add_node(nlt[0],layer=newlayer[0])
            else:
                newNet.add_node(nlt[0],layer=newlayer)

    return newNet
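
# Hedged sketch following the docstring's first example (an illustration, not
# part of the original source): aggregate a one-aspect categorical multiplex
# network over its single aspect; parallel intra-layer weights are summed.
def _aggregate_example():
    n = netmodule.MultiplexNetwork(couplings=[('categorical', 1.0)])
    n[1, 2, 'a'] = 1
    n[1, 2, 'b'] = 1
    an = aggregate(n, 1)
    return an[1, 2]  # expected: 2 (1 + 1 summed over layers 'a' and 'b')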