def evalOnePair(g, lli, s, d, lowerBoundCounts, dagSize_stretchToCounts, pairsWithDagSizeSmallerThanLowerBound, codec2HdrLen_stretchToCounts, codec4HdrLen_stretchToCounts, pairsWithLargeCodec4Encodings, M): lowerBound = None for stretch in stretches: pp, dp = getDgWithStretch(g, s, d, weighted, stretch) if (not pp): print 'no path: s,d="%s","%s"' % (s,d) return if dp == None: dp = {} pass dag, virtualDetourPaths = approach2.getDagWithVnodes( pp, dp, returnDetourPathsWithVNodes=True) codec2HdrLen = codec2.encode( dag, pp, virtualDetourPaths, lli, False, s, d, useLinkIdLenPrefix=False, roundUpToMultipleBits=8)[0] addValueCount(codec2HdrLen_stretchToCounts[stretch], codec2HdrLen) codec4HdrLen = codec4.encode( pp, dp, lli, s, d, roundUpToMultipleBits=8) addValueCount(codec4HdrLen_stretchToCounts[stretch], codec4HdrLen) dagSize = dag.number_of_edges() addValueCount(dagSize_stretchToCounts[stretch], dagSize) if codec4HdrLen > headerLengthThreshold: pairsWithLargeCodec4Encodings[stretch].append((s,d)) pass if lowerBound is None: lowerBound = computeBounds(g, s, d, pp, M, weighted) addValueCount(lowerBoundCounts, lowerBound) pass if dagSize < lowerBound: pairsWithDagSizeSmallerThanLowerBound[stretch].append((s,d)) pass pass return
def testRocketFuel3967(): filepath = '../../../graphs/rocketfuel/3967/weights.intra' g, lli = utils.textToG(filepath, useInt=False) weighted = True testCases = ( ('317', '431', '01100010101101010001001010100010101010',), ) failureSrcDstPairs = [] for s,d, expectedString in testCases: expectedEncodingBs = bitstring.BitString('0b' + expectedString) pp, dp = approach2.getDg(g, s, d, weighted) dag = approach2.getDagWithVnodes( pp, dp, returnDetourPathsWithVNodes=False) encodingBs = encode( pp, dp, lli, s, d, returnActualEncoding=True, roundUpToMultipleBits=1) if encodingBs != expectedEncodingBs: failureSrcDstPairs.append((s,d)) pass pass func_name = inspect.getframeinfo(inspect.currentframe())[2] print 'Test', func_name, 'result:' if len(failureSrcDstPairs) == 0: print ' passed' pass else: print ' failed' print ' The failed src-dst pairs:' for s,d in (failureSrcDstPairs): print 's,d=%s,%s' %(repr(s),repr(d)) pass pass return
def evalOneFile(filename, numberOfPairsToTry, pairIsOrdered=False,
                weighted=False):
    # Evaluate approach2 header lengths for src-dst pairs of one graph file.
    #
    # Pair selection: if numberOfPairsToTry > 0, sample that many distinct
    # pairs using crypto-quality random indices (os.urandom); otherwise
    # evaluate ALL unordered pairs.  For each connected pair, getHeader2()
    # header lengths are computed at every alignment in the module-level
    # 'offsetPtrAlignments', twice: once with the full detour-path set
    # ("Normal") and once after deleting detours for the first third of the
    # primary-path nodes ("Smaller").
    #
    # Returns a FileResult3 wrapping a dict keyed by (s, d); each value is a
    # SrcDstPairResult3 (or None for pairs with no path).
    # NOTE(review): relies on module-level names offsetPtrAlignments,
    # SrcDstPairResult3 and FileResult3 -- not visible in this chunk.
    startdateSecs = int(time.time())
    print '''
    _______________________________________________
    filename: [%s]
    start date: [%s]
    ''' % (filename, time.ctime(startdateSecs))
    g, lli = utils.textToG(filename, useInt=False,
                           ignoreWeights=(not weighted))
    allNodes = g.nodes()
    numNodes = len(allNodes)
    i = 0
    srcDstPairResults3 = {}
    if numberOfPairsToTry > 0:
        while i < numberOfPairsToTry:
            # this is crypto random integer
            idx1 = (struct.unpack('I', os.urandom(4))[0]) % numNodes
            idx2 = (struct.unpack('I', os.urandom(4))[0]) % numNodes
            while idx2 == idx1:
                idx2 = (struct.unpack('I', os.urandom(4))[0]) % numNodes
                pass
            s,d = allNodes[idx1], allNodes[idx2]
            if (s,d) in srcDstPairResults3:
                # definitely skip
                print 'pair (%s,%s) already encountered -> skip' % (s,d)
                continue
            elif (d,s) in srcDstPairResults3:
                # not seen (s,d) yet but seen (d,s), should skip or not?
                if not pairIsOrdered:
                    print 'pairs are not ordered, and (%s,%s) already encountered -> skip' % (d,s)
                    continue
                pass
            # do this so we know we have seen this (s, d) pair
            srcDstPairResults3[(s, d)] = None # init to None (which will
                                              # mean disconnected)
            print 's,d="%s","%s"' % (s,d)
            i += 1
            pass # end while i < numberOfPairsToTry
        pass # end if numberOfPairsToTry > 0
    else:
        # numberOfPairsToTry is <= 0, so we do all (un-ordered)
        # pairs. the graph'd better be not too big.
        for i in range(numNodes - 1):
            for j in range(i + 1, numNodes):
                s = allNodes[i]
                d = allNodes[j]
                print 's,d="%s","%s"' % (s,d)
                srcDstPairResults3[(s, d)] = None # init to None (which will
                                                  # mean disconnected)
                pass
            pass
        pass

    ###########################
    # now that we have the pairs we want to eval, eval them
    for s, d in srcDstPairResults3.keys():
        #### use approach2
        hdrLens2Normal = {}
        dagsizeNormal = []
        hdrLens2Smaller = {}
        dagsizeSmaller = []
        pp, dp = approach2.getDg(g, s, d, weighted=weighted,
                                 everyOtherNodeUsePredecessor=False)
        if (not pp) or (not dp):
            print 'no path: s,d="%s","%s"' % (s,d)
            continue
        # two passes: first with the full detour set, then with detours of
        # the first third of the primary path removed.  NOTE(review): the
        # second pass mutates "dp" in place, so pass order matters.
        for hdrlenDict, dagsizeList, onlyLaterTwoThirds \
            in ((hdrLens2Normal, dagsizeNormal, False),
                (hdrLens2Smaller, dagsizeSmaller, True)):
            # if onlyLaterTwoThirds
            if onlyLaterTwoThirds:
                # remove from "dp" entries of the first 1/3 of pnodes
                pplen = len(pp)
                for pnode in pp[:int(float(pplen)/3)]:
                    if pnode in dp:
                        del dp[pnode]
                        pass
                    pass
                pass
            dag, virtualDetourPaths = approach2.getDagWithVnodes(
                pp, dp, returnDetourPathsWithVNodes=True)
            dagsizeList.append(dag.number_of_edges())
            # one header length per requested offset-pointer alignment
            for offsetPtrAlignment in offsetPtrAlignments:
                hdrlenDict[offsetPtrAlignment], \
                    _ = approach2.getHeader2(
                    dag, pp, virtualDetourPaths, lli, False, s, d,
                    roundUpToMultipleBits=offsetPtrAlignment)
                pass
            pass
        if len(hdrLens2Smaller) == 0:
            # either both passes produced results or neither did
            assert len(hdrLens2Normal) == len(dagsizeSmaller) == len(dagsizeNormal) == 0
            pass
        else:
            srcDstPairResults3[(s, d)] = SrcDstPairResult3(
                hdrLens2Normal, dagsizeNormal[0],
                hdrLens2Smaller, dagsizeSmaller[0])
            pass
        pass # end while loop
    enddateSecs = int(time.time())
    fileResult3 = FileResult3(filename, startdateSecs, enddateSecs,
                              srcDstPairResults3)
    return fileResult3
    # NOTE(review): this is the tail of another evalOneFile variant whose
    # "def" header is outside this chunk; it computes lower/upper DAG-size
    # bounds per pair instead of header lengths.
    # M is the max/min edge-weight ratio fed to computeBounds().
    weights = map(lambda (u, v, edgeData): edgeData['weight'],
                  g.edges(data=True))
    maxWeight = max(weights)
    minWeight = min(weights)
    assert minWeight > 0
    M = float(maxWeight) / float(minWeight)
    for s, d in srcDstPairResults3.keys():
        #### use approach2
        pp, dp = approach2.getDg(g, s, d, weighted)
        if (not pp) or (not dp):
            print 'no path: s,d="%s","%s"' % (s,d)
            continue
        dag = approach2.getDagWithVnodes(pp, dp)
        # NOTE(review): 'pathFunction' is not defined in this fragment --
        # presumably a module-level or enclosing-scope name; confirm.
        lowerBound, upperBound = computeBounds(g, s, d, pp, M, pathFunction)
        srcDstPairResults3[(s, d)] = SrcDstPairResult3(
            lowerBound, upperBound, dag.number_of_edges())
        pass # end while loop
    enddateSecs = int(time.time())
    fileResult3 = FileResult3(filename, startdateSecs, enddateSecs,
                              srcDstPairResults3)
    return fileResult3
def cmd_showPairsWithDagSizeSmallerThanLowerBound(argv): argvidx = 0 cmdname = argv[argvidx] argvidx += 1 assert 'cmd_' + cmdname == inspect.stack()[0][3] showDetails = False opts, args = getopt.getopt(argv[argvidx:], '', ['showDetails', ]) ## parse options for o, a in opts: if o == '--showDetails': showDetails = True pass pass dirpaths = args assert len(dirpaths) > 0 curGraphFilePath = None for dirpath in dirpaths: filenames = os.listdir(dirpath) for filename in filenames: filepath = dirpath + '/' + filename pr = utils.unpickleStuff(filepath) if showDetails and (pr.filename != curGraphFilePath): g, _ = utils.textToG(pr.filename, useInt=False, ignoreWeights=not pr.weighted) # calculate M for computeBounds() if pr.weighted: weights = map(lambda (u, v, edgeData): edgeData['weight'], g.edges(data=True)) maxWeight = max(weights) minWeight = min(weights) assert minWeight > 0 M = float(maxWeight) / float(minWeight) pass else: M = float(1) pass pass for stretch in stretches: for (s,d) in pr.pairsWithDagSizeSmallerThanLowerBound[stretch]: if showDetails: pp, dp = getDgWithStretch(g, s, d, pr.weighted, stretch) if dp is None: dp = {} pass dag, virtualDetourPaths = approach2.getDagWithVnodes( pp, dp, returnDetourPathsWithVNodes=True) lowerBound = computeBounds(g, s, d, pp, M, pr.weighted) print 's,d=%s,%s; #OfEdges(pp)=%u, #OfEdges(dps)=%u, lowerBound=%u, dagSize=%u' % ( repr(s), repr(d), len(pp)-1, sum(map(lambda p: len(p) - 1, dp.values())), lowerBound, dag.number_of_edges() ) pass else: print 's,d=%s,%s' % (repr(s),repr(d)) pass pass pass pass pass return
def evalOnePair(g, lli, localLinkLabelLens, s, d, lowerBoundCounts, dagSize_stretchToCounts, pairsWithDagSizeSmallerThanLowerBound, codec2HdrLen_stretchToCounts, codec4HdrLen_stretchToCounts, singlePath_encodingLen_counts, pairsWithLargeCodec4Encodings, M): lowerBound = None for stretch in stretches: pp, dp = getDgWithStretch(g, s, d, weighted, stretch) if (not pp): print 'no path: s,d="%s","%s"' % (s,d) return if dp == None: dp = {} pass dag, virtualDetourPaths = approach2.getDagWithVnodes( pp, dp, returnDetourPathsWithVNodes=True) try: # in bits codec2HdrLen = codec2.encode( dag, pp, virtualDetourPaths, lli, False, s, d, useLinkIdLenPrefix=False, localLinkLabelLens=localLinkLabelLens, returnActualEncoding=False, roundUpToMultipleBits=1)[0] pass except Exception, e: print 'WARNING: pair s=%s,d=%s is problematic\n' %(s,d) print str(e) return # convert to bytes codec2HdrLen = int(math.ceil(float(codec2HdrLen)/8)) addValueCount(codec2HdrLen_stretchToCounts[stretch], codec2HdrLen) # in bits codec4HdrLen = codec4.encode( pp, dp, lli, localLinkLabelLens, s, d, roundUpToMultipleBits=1, returnActualEncoding=False) # convert to bytes codec4HdrLen = int(math.ceil(float(codec4HdrLen)/8)) addValueCount(codec4HdrLen_stretchToCounts[stretch], codec4HdrLen) # just sum up the all the link label lengths singlePath_encodingLen = sum(map(lambda n: localLinkLabelLens[n], pp[:-1])) # get number of bytes from number of bits singlePath_encodingLen = int(math.ceil(float(singlePath_encodingLen)/8)) addValueCount(singlePath_encodingLen_counts, singlePath_encodingLen) dagSize = dag.number_of_edges() addValueCount(dagSize_stretchToCounts[stretch], dagSize) if codec4HdrLen > headerLengthThreshold: pairsWithLargeCodec4Encodings[stretch].append((s,d)) pass if lowerBound is None: lowerBound = computeBounds(g, s, d, pp, M, weighted) addValueCount(lowerBoundCounts, lowerBound) pass if dagSize < lowerBound: pairsWithDagSizeSmallerThanLowerBound[stretch].append((s,d)) pass pass
def evalOneFile(filename, numberOfPairsToTry, pairIsOrdered=False,
                weighted=False):
    # Evaluate, for src-dst pairs of one graph file, how many EXTRA nodes
    # the vnode DAG contains compared to the set of distinct graph nodes on
    # the primary + detour paths (absolute diff and percentage).
    #
    # Pair selection works as in the sibling evalOneFile variant: sample
    # numberOfPairsToTry distinct pairs with crypto-quality randomness, or
    # all unordered pairs when numberOfPairsToTry <= 0.
    #
    # Returns a FileResult3 wrapping {(s, d): SrcDstPairResult3 | None}.
    # NOTE(review): SrcDstPairResult3 / FileResult3 are defined elsewhere
    # in the file and differ per variant -- here the result is (diff,
    # percent).
    startdateSecs = int(time.time())
    print '''
    _______________________________________________
    filename: [%s]
    start date: [%s]
    ''' % (filename, time.ctime(startdateSecs))
    g, lli = utils.textToG(filename, useInt=False)
    allNodes = g.nodes()
    numNodes = len(allNodes)
    i = 0
    srcDstPairResults3 = {}
    if numberOfPairsToTry > 0:
        while i < numberOfPairsToTry:
            # this is crypto random integer
            idx1 = (struct.unpack('I', os.urandom(4))[0]) % numNodes
            idx2 = (struct.unpack('I', os.urandom(4))[0]) % numNodes
            while idx2 == idx1:
                idx2 = (struct.unpack('I', os.urandom(4))[0]) % numNodes
                pass
            s,d = allNodes[idx1], allNodes[idx2]
            if (s,d) in srcDstPairResults3:
                # definitely skip
                print 'pair (%s,%s) already encountered -> skip' % (s,d)
                continue
            elif (d,s) in srcDstPairResults3:
                # not seen (s,d) yet but seen (d,s), should skip or not?
                if not pairIsOrdered:
                    print 'pairs are not ordered, and (%s,%s) already encountered -> skip' % (d,s)
                    continue
                pass
            # do this so we know we have seen this (s, d) pair
            srcDstPairResults3[(s, d)] = None # init to None (which will
                                              # mean disconnected)
            print 's,d="%s","%s"' % (s,d)
            i += 1
            pass # end while i < numberOfPairsToTry
        pass # end if numberOfPairsToTry > 0
    else:
        # numberOfPairsToTry is <= 0, so we do all (un-ordered)
        # pairs. the graph'd better be not too big.
        for i in range(numNodes - 1):
            for j in range(i + 1, numNodes):
                s = allNodes[i]
                d = allNodes[j]
                print 's,d="%s","%s"' % (s,d)
                srcDstPairResults3[(s, d)] = None # init to None (which will
                                                  # mean disconnected)
                pass
            pass
        pass

    ###########################
    # now that we have the pairs we want to eval, eval them
    for s, d in srcDstPairResults3.keys():
        #### use approach2
        pp, dp = approach2.getDg(g, s, d, weighted)
        if (not pp) or (not dp):
            print 'no path: s,d="%s","%s"' % (s,d)
            continue
        # how many nodes in the undirected graph?
        subnodes = set(pp)
        for path in dp.values():
            subnodes.update(path)
            pass
        dag = approach2.getDagWithVnodes(
            pp, dp, returnDetourPathsWithVNodes=False)
        # extra nodes introduced by vnode duplication, absolute and as a
        # percentage of the distinct underlying nodes
        diff = dag.number_of_nodes() - len(subnodes)
        percent = (float(diff) / len(subnodes)) * 100
        srcDstPairResults3[(s, d)] = SrcDstPairResult3(
            diff, percent)
        pass # end while loop
    enddateSecs = int(time.time())
    fileResult3 = FileResult3(filename, startdateSecs, enddateSecs,
                              srcDstPairResults3)
    return fileResult3