Example #1
def getDCS_Greedy(graphs,nodes):
    global numberOfNodes
    global numberOfEdges
    numberOfNodes = len(nodes)
    # Create a table of nodes by degree, to enable faster lookup.
    utils.timer()
    initLookupTable(graphs)
    print("initilization took " + str(utils.timer()))
    # Pass 1, to find the subgraph with the highest density.
    highestDensity = 0
    originalNumberOfNodes = numberOfNodes
    # Fallback in case no step improves on the initial density of 0.
    optimalNumberOfNodes = numberOfNodes
    while numberOfNodes > 1:
        #printProgress("Searching for highest density: ",
        #    originalGraphs[0].GetNodes(), graphs[0].GetNodes())
        currDensity = densityMultiple(graphs)
        if (highestDensity < currDensity):
            highestDensity = currDensity
            optimalNumberOfNodes = numberOfNodes
        node = takeSmallestDegree(graphs)
        numberOfNodes -= 1
        updateLists(graphs, node)
    print("first pass took: " + str(utils.timer()))
    print("Highest density found: " + str(highestDensity))
    # Pass 2, look for the subgraph that has a density equal to highest seen.
    numberOfNodes = originalNumberOfNodes
    initLookupTable(graphs)
    while numberOfNodes > optimalNumberOfNodes:
        node = takeSmallestDegree(graphs)
        numberOfNodes -= 1
        updateLists(graphs, node)

    nodesInSubGraph = getNodesInSubGraph(nodes)
    print("second pass took: " + str(utils.timer()))
    print("we found a total number of nodes = " + str(optimalNumberOfNodes))
    return (nodesInSubGraph,highestDensity)
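
The helpers this example relies on (initLookupTable, densityMultiple, takeSmallestDegree, updateLists, getNodesInSubGraph) are defined elsewhere in the project and are not shown on this page. The code below is only a minimal, self-contained sketch of what they might look like; it assumes plain adjacency-dict graphs rather than SNAP graphs, that all graphs share one node set (as the driver's preprocessing comments suggest), that "density" means the smallest edge-to-node ratio over the graphs, and that the node peeled in each step is the one with the smallest total degree. The real helpers may differ.

degrees = {}       # node -> total degree across all graphs (surviving nodes only)
edgeCounts = []    # per-graph count of edges between surviving nodes
nodesLeft = 0      # number of surviving nodes (all graphs share one node set)


def initLookupTable(graphs):
    """(Re)build the degree table and per-graph edge counts over all nodes."""
    global nodesLeft
    degrees.clear()
    edgeCounts[:] = []
    for g in graphs:
        edgeCounts.append(sum(len(nbrs) for nbrs in g.values()) // 2)
        for node, neighbours in g.items():
            degrees[node] = degrees.get(node, 0) + len(neighbours)
    nodesLeft = len(degrees)


def densityMultiple(graphs):
    """Density of the common subgraph: the smallest |E|/|V| over the graphs."""
    return min(edges / float(nodesLeft) for edges in edgeCounts)


def takeSmallestDegree(graphs):
    """Pick the node to peel next: the surviving node with the smallest total degree."""
    return min(degrees, key=degrees.get)


def updateLists(graphs, node):
    """Mark node as removed; update neighbour degrees and per-graph edge counts."""
    global nodesLeft
    del degrees[node]
    nodesLeft -= 1
    for i, g in enumerate(graphs):
        for neighbour in g.get(node, set()):
            if neighbour in degrees:  # only edges between surviving nodes stay counted
                degrees[neighbour] -= 1
                edgeCounts[i] -= 1


def getNodesInSubGraph(nodes):
    """Nodes that survived the peeling are exactly those still in the table."""
    return [n for n in nodes if n in degrees]

Together with a stopwatch-style utils.timer() (a stand-in is sketched after the driver script below), these definitions are enough to run getDCS_Greedy end to end on small dict-based graphs. The project's real helpers presumably bucket nodes by degree, as the "faster lookup" comment suggests, instead of the linear scan used in takeSmallestDegree here.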
Example #2
from lib import snap
import sys
import time
from greedy import DCSGreedy, utils, preprocessGreedy

if (len(sys.argv) < 2):
    sys.exit("Usage: python " + sys.argv[0] + " <file1> <file2> ...")

utils.timer()  # Start the timer.
if (sys.argv[1] == "-d"):
    graphs = preprocessGreedy.loadDirGraphs()
else:
    graphs = preprocessGreedy.loadGraphs()

print("Imported " + str(len(graphs)) + " graphs in " + utils.timer())
# Don't remove this call; it is not the deep preprocessing step that takes time.
numberOfNodes = preprocessGreedy.simplePreprocessing(graphs)

# If there are multiple graphs, they need to be over the same set of nodes.
# For now only the node counts are compared; a deeper check could be added here.

print("Preprocessing took " + utils.timer())
startTime = time.perf_counter()  # time.clock() was removed in Python 3.8
(nodes, density) = DCSGreedy.getDCS_Greedy(graphs, numberOfNodes)
runTime = time.perf_counter() - startTime

utils.saveResults(nodes, str(runTime) + " seconds", density)

# printQuickStats(g2)
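
Both getDCS_Greedy and this driver also depend on utils.timer() behaving like a stateful stopwatch: each call appears to return the time elapsed since the previous call. The real implementation is not shown on this page; the version below is only a plausible stand-in built on time.perf_counter().

import time

_lastTick = [None]  # reference point of the stopwatch (None until the first call)


def timer():
    """Return the seconds elapsed since the previous call (0.0 the first time)."""
    now = time.perf_counter()
    elapsed = 0.0 if _lastTick[0] is None else now - _lastTick[0]
    _lastTick[0] = now
    return elapsed

A stateful stopwatch keeps each call site to a single line, at the cost of not being reentrant; that matches how these examples use it, timing the graph loading, the preprocessing, and the two greedy passes one after another.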