# Example #1
# 0
def metrics(a,
            appVal,
            degenName="NoModel",
            pcLoss="0",
            edgePCCons=None,
            dVal="2",
            thresholdtype="local"
            ):
    """Threshold a brain network at several edge densities and record metrics.

    For each edge-percentage threshold the graph is locally thresholded and
    the weighted degree is written out; the graph is then binarised and the
    unweighted degree is written out.  After the first threshold, results are
    appended to the existing output files.

    Parameters
    ----------
    a : brain object
        Assumed to expose ``localThresholding``, ``makebctmat``,
        ``weightToDistance``, ``binarise``, ``applyThreshold``, ``G`` and
        ``edgePC`` (maybrain ``brainObj`` — TODO confirm against caller).
    appVal : bool
        Whether the first write appends to existing results files; forced to
        True after the first iteration so later thresholds always append.
    degenName : str
        Model label embedded in the output file name.
    pcLoss : str
        Percentage-loss label recorded alongside each result.
    edgePCCons : iterable of int, optional
        Edge-percentage thresholds to apply.  Defaults to 1..10.
    dVal : str
        Label appended to the output file name.
    thresholdtype : str
        Thresholding-scheme label embedded in the output file name.

    Notes
    -----
    The original version carried large blocks of commented-out weighted /
    modularity / infomap / Newman metric code; that dead code has been
    removed, along with the unused ``appValW``/``appValT`` locals it fed.
    """
    # Avoid the mutable-default-argument pitfall: build the default per call.
    if edgePCCons is None:
        edgePCCons = list(range(1, 11))

    for e in edgePCCons:
        # Output file base name for this threshold.
        ofb = '_'.join(["brain", degenName, thresholdtype, str(e), "d"+dVal+"_"])

        a.localThresholding(edgePC=e)

        a.makebctmat()
        a.weightToDistance()

        # Extra properties recorded with each result row.
        propDict = {"edgePC": str(a.edgePC),
                    "pcLoss": pcLoss}

        # weighted degree
        degs = a.G.degree(weight='weight')
        extras.writeResults(degs, "degreeWt", ofb, propDict=propDict,
                            append=appVal)

        # now collect measures in a binary graph
        a.binarise()

        a.weightToDistance()
        a.makebctmat()

        # unweighted degree
        degs = mbt.nx.degree(a.G)
        extras.writeResults(degs, "degree", ofb, propDict=propDict,
                            append=appVal)

        # append any further iterations
        appVal = True

    a.applyThreshold()
# Top-level driver: build a brain object and record degree / clustering
# metrics at each edge-percentage threshold.
# NOTE(review): `edgePCCons`, `thresholdtype`, `dVal`, `adjMatFile`, `delim`
# and `parcelFile` are presumably defined earlier in the file — confirm;
# the loop body may also continue beyond this chunk.
a = mbt.brainObj()
appVal = False   # first write creates the results files; later ones append
appValT = False

# unweighted measures
# iterate through thresholds
for e in edgePCCons:
    # base name for this threshold's output files
    ofb = '_'.join(["brain", thresholdtype, str(e), "d"+dVal+"_"])
    propDict = {"edgePC":str(e)} # added properties for results files

    # re-import the adjacency matrix so each threshold starts from raw data
    a.importAdjFile(adjMatFile, delimiter=delim)
    a.localThresholding(edgePC=e)  # apply a threshold
    a.removeUnconnectedNodes()     # remove unconnected nodes
    
    # weighted degree on the thresholded (still weighted) graph
    degs = a.G.degree(weight='weight')
    extras.writeResults(degs, "degreeWt", ofb, append=appVal)

    a.binarise()  # binarise the graph
    a.importSpatialInfo(parcelFile)  # read spatial information
    a.weightToDistance() # convert weights to distance (for closeness centrality function)
    a.makebctmat() # create an array to be used by the brain connectivity toolbox functions
    
    # NOTE(review): ofbT is identical to ofb here — looks like a leftover
    # from a separate "true"/tree output path; verify it is still needed
    ofbT = '_'.join(["brain", thresholdtype, str(e), "d"+dVal+"_"])  
   
    #### small worldness metrics ####
    degs = mbt.nx.degree(a.G)  # measure degree
    extras.writeResults(degs, "degree", ofb, propDict=propDict, append=appVal)  # write the results to a file
        
    clustCoeff = mbt.nx.average_clustering(a.G)
    extras.writeResults(clustCoeff, "clusterCoeff", ofb, propDict=propDict, append=appVal)
    del(clustCoeff)