def test(intercept, featureWeight):
    """Score every (query, url) pair in the test set with the learned linear
    model and write the URLs, ranked by descending score, to result.txt."""
    testFeatures = getFeatures(0)
    region = testFeatures.region
    score = [{} for _ in range(len(region))]
    with open("../txt/result.txt", 'w') as fOut:
        for i in range(len(region)):
            for query in region[i]:
                score[i][query] = {}
                for url in region[i][query]:
                    # Linear model: score = w . x + intercept
                    score[i][query][url] = \
                        dot_product(region[i][query][url], featureWeight) + intercept
                # Rank URLs by descending score.
                urls_ranked = sorted(score[i][query], key=score[i][query].get, reverse=True)
                out = str(query) + " " + str(i) + " " + " ".join(str(u) for u in urls_ranked) + "\n"
                fOut.write(out)
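
# `dot_product` is assumed to be defined elsewhere in this project; a minimal
# sketch consistent with the call above (feature vector dotted with the
# learned weight vector):
def dot_product(features, weights):
    return sum(f * w for f, w in zip(features, weights))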
def consolidate_features(base_graphs, Gcollab_delta, k):
    features = {}

    Gcollab = base_graphs[graphutils.Graph.COLLAB]
    feature_graphs = graphutils.split_feat_graphs(base_graphs)

    # Collect every node pair within k hops, keyed with the smaller ID first
    # so each undirected pair is recorded exactly once. Canonicalizing into a
    # separate tuple avoids mutating nodeID mid-loop.
    for node in Gcollab.Nodes():
        nodeID = node.GetId()

        for neighborID in graphutils.getKHopN(Gcollab, nodeID, k):
            pair = (min(nodeID, neighborID), max(nodeID, neighborID))
            if pair not in features:
                features[pair] = []

    # Let each feature graph fill in its feature values for the collected pairs.
    for graph in feature_graphs:
        features = getFeatures(Gcollab, Gcollab_delta, graph, features)

    return features
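
# graphutils.getKHopN is assumed to return the IDs of all nodes within k hops
# of nodeID. A minimal BFS sketch of that contract over a Snap.py graph,
# included only as an illustration (the real helper lives in graphutils):
def getKHopN_sketch(G, nodeID, k):
    visited = {nodeID}
    frontier = {nodeID}
    for _ in range(k):
        nxt = set()
        for nid in frontier:
            NI = G.GetNI(nid)
            for nbrID in NI.GetOutEdges():  # neighbor IDs in Snap.py
                if nbrID not in visited:
                    visited.add(nbrID)
                    nxt.add(nbrID)
        frontier = nxt
    visited.discard(nodeID)  # exclude the start node itself
    return visited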
import cv2
import matplotlib.pyplot as plt

if __name__ == '__main__':
	# Set up video capture and grab two consecutive frames.
	cap = cv2.VideoCapture("input_videos/Easy.mp4")
	ret, img1 = cap.read()
	ret, img2 = cap.read()
	cap.release()

	# Corner-detection parameters.
	maxCorners = 20
	qualityLevel = 0.01
	minDistance = 8

	# Draw the bounding boxes, then detect features inside them.
	bbox_list = []
	bbox_pts = []
	bbox_list, bbox_pts, new_img = getBoundingBox(img1, bbox_list, bbox_pts)
	img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
	startXs, startYs, _ = getFeatures(img1_gray, bbox_list, maxCorners, qualityLevel, minDistance)

	# Shift the features by a fixed offset as a quick sanity check.
	newXs = startXs + 5
	newYs = startYs + 7
	print(startXs, startYs)
	print(newXs, newYs)
	#nnewXs, nnewYs = estimateAllTranslation(newXs, newYs, img2, img3)
	#print(len(newXs[0]))
	#print(len(newYs[0]))

	plt.figure()
	plt.imshow(cv2.cvtColor(img2, cv2.COLOR_BGR2RGB))  # matplotlib expects RGB
	#plt.plot(newYs, newXs, 'w+')
	plt.axis('off')
	plt.show()
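
# getFeatures and getBoundingBox are imported from elsewhere in this project.
# A minimal sketch of what getFeatures plausibly does, built on
# cv2.goodFeaturesToTrack; the (x, y, w, h) box format and per-box object
# arrays are assumptions, not the project's actual code:
import numpy as np

def getFeatures_sketch(img_gray, bbox_list, maxCorners, qualityLevel, minDistance):
	Xs, Ys = [], []
	for (x, y, w, h) in bbox_list:
		roi = img_gray[y:y + h, x:x + w]
		corners = cv2.goodFeaturesToTrack(roi, maxCorners, qualityLevel, minDistance)
		if corners is None:
			corners = np.empty((0, 1, 2), dtype=np.float32)
		pts = corners.reshape(-1, 2)
		Xs.append(pts[:, 0] + x)  # shift back to full-image coordinates
		Ys.append(pts[:, 1] + y)
	# Object arrays let `startXs + 5` broadcast per box, as in the script above.
	return np.array(Xs, dtype=object), np.array(Ys, dtype=object), None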
import time
import subprocess
from getFeatures import *
from featureProcess import getAverage
from featureProcess import normalize
from rank import *

print('##################################')
print('#          Program start         #')
print('##################################')

print('-Stage 1- extracting features..')
trainStruct = getFeatures(1)

print('-Stage 2- processing features..')
trainStruct = getAverage(trainStruct)

print('-Stage 3- normalizing features..')
trainStruct = normalize(trainStruct)

print('-Stage 4- generating training set for Weka..')
arffGen(trainStruct.region)
time.sleep(1)

print('-Stage 5- getting trained model parameters..')
# Parse WekaTester output: the first line is the intercept, the lines up to
# (but not including) the last hold the coefficients.
beta = subprocess.getoutput("java WekaTester trainClickThrough.arff").split('\n')
intercept = float(beta[0])
beta = [float(item) for item in beta[1:-1]]
for b in beta:
    print(b)

print('-Stage 6- getting test set..')
test(intercept, beta)
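
# arffGen comes from one of the star imports above. A minimal sketch of the
# ARFF writer it presumably implements; the relation name, attribute names,
# and attribute count are assumptions, while the region[i][query][url] ->
# feature-vector layout matches how test() reads the same structure:
def arffGen_sketch(region, path="trainClickThrough.arff", n_features=10):
    with open(path, 'w') as f:
        f.write("@RELATION clickthrough\n\n")
        for j in range(n_features):
            f.write("@ATTRIBUTE f%d NUMERIC\n" % j)
        f.write("\n@DATA\n")
        for part in region:
            for query in part:
                for url in part[query]:
                    f.write(",".join(str(v) for v in part[query][url]) + "\n")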