def getTrainLowFeature():
    # Accumulate the edge maps of the low-quality training images and return their mean.
    rootDir = "E:/ImageDataset/train/train_low"
    paths, counts = getPath.getPath(rootDir)
    snap = np.zeros((100, 100))
    i = 5263  # start index for the saved edge-map files
    for path in paths:
        print 'trainlow ' + path
        image = cv2.imread(path)
        resize = extractEdge(image)
        np.save('E:/color/data/spatialDistributionofEdges/{0}.npy'.format(i), resize)
        i = i + 1
        snap = snap + resize
    counts = float(counts)
    print counts
    Ms = snap / counts
    return Ms
def getTrainHighFeature():
    # Accumulate the edge maps of the high-quality training images and return their mean.
    print 'hello'
    rootDir = "E:/ImageDataset/train/train_high"
    paths, counts = getPath.getPath(rootDir)
    pro = np.zeros((100, 100))
    i = 1
    for path in paths:
        print 'trainhigh ' + path
        image = cv2.imread(path)
        resize = extractEdge(image)
        np.save('E:/color/data/spatialDistributionofEdges/{0}.npy'.format(i), resize)
        i = i + 1
        pro = pro + resize
    counts = float(counts)
    Mp = pro / counts
    return Mp
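# Both functions above depend on an extractEdge helper that is not part of this excerpt.
# A minimal sketch of one possible implementation, assuming the helper is meant to return
# a 100x100 binary edge map; the Canny thresholds and the function name are assumptions,
# not the original code:
def extractEdge_sketch(image):
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    resized = cv2.resize(gray, (100, 100))
    edges = cv2.Canny(resized, 100, 200)   # edge map with values in {0, 255}
    return edges / 255.0                   # normalize so the accumulated map stays in [0, 1]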
import numpy as np
import getPath
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import cross_val_score

ntrain_high = 2262
ntrain_low = 6581
ntest_high = 2262
ntest_low = 6580
ntrain = 8843
ntest = 8842
count = 17685
feature_dim = 7

# Stack the per-feature .npy files into a (feature_dim, ntrain) matrix.
feature_train = np.zeros((feature_dim, ntrain))
root_train = 'E:/featureData_CUHK/train'
paths_train, count_train = getPath.getPath(root_train)
i = 0
for path in paths_train:
    feature = np.load(path)
    feature = np.array(feature)
    feature_train[i] = feature
    i = i + 1
train_feature = np.transpose(feature_train)  # training-set feature matrix, one row per image

# Label the training set: 1 for high-quality images, 0 for low-quality images.
label_train = np.array([])
for i in range(1, ntrain_high + 1):
    label_train = np.append(label_train, 1)
for j in range(1, ntrain_low + 1):
    label_train = np.append(label_train, 0)
train_label = np.transpose(label_train)
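# The excerpt above ends before the imported GridSearchCV is used. A minimal sketch of how
# train_feature and train_label could be fed to it, assuming an RBF-kernel SVC; the parameter
# grid and cv value are illustrative assumptions, not the original setup:
from sklearn.svm import SVC

param_grid = {'C': [0.1, 1, 10, 100], 'gamma': [0.001, 0.01, 0.1]}
grid = GridSearchCV(SVC(kernel='rbf'), param_grid, cv=5)
grid.fit(train_feature, train_label)
print grid.best_params_, grid.best_score_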
""" import getPath #import colorPalette import layoutComposition import edgeComposition import GT_layout import GT_edge import blur import dark import Contrasts import HSVcounts root_train = 'E:/ImageDataset_AVA/train/' root_test = 'E:/ImageDataset_AVA/test/' paths_train, counts_train = getPath.getPath(root_train) paths_test, counts_test = getPath.getPath(root_test) root_trainhigh = 'E:/ImageDataset_AVA/train/train_high' root_trainlow = 'E:/ImageDataset_AVA/train/train_low' root_testhigh = 'E:/ImageDataset_AVA/test/test_high' root_testlow = 'E:/ImageDataset_AVA/test/test_low' paths_trainhigh, counts_trainhigh = getPath.getPath(root_trainhigh) paths_trainlow, counts_trainlow = getPath.getPath(root_trainlow) paths_testhigh, counts_testhigh = getPath.getPath(root_testhigh) paths_testlow, counts_testlow = getPath.getPath(root_testlow) layoutComposition.layout(paths_trainhigh, paths_testhigh, paths_trainlow, paths_testlow, paths_train, paths_test) edgeComposition.EC(paths_trainhigh, paths_testhigh, paths_trainlow,
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc, accuracy_score
import getPath

ntrain_high = 12771
ntrain_low = 12771
ntest_high = 12771
ntest_low = 12771
ntrain = 25542
ntest = 25542
count = 51084
feature_dim = 24

# Stack the per-feature .npy files into a (feature_dim, ntrain) matrix for the training set.
feature_train = np.zeros((feature_dim, ntrain))
root_train = 'E:/efficiency_AVA/data/train'
paths_train, count_train = getPath.getPath(root_train)
i = 0
for path in paths_train:
    feature = np.load(path)
    feature = np.array(feature)
    feature_train[i] = feature
    i = i + 1
train_feature = np.transpose(feature_train)
np.save('E:/efficiency_AVA/data/trainfeature.npy', train_feature)

# Repeat for the test set.
feature_test = np.zeros((feature_dim, ntest))
root_test = 'E:/efficiency_AVA/data/test'
paths_test, count_test = getPath.getPath(root_test)
i = 0
for path in paths_test:
    feature = np.load(path)
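# roc_curve, auc and accuracy_score are imported above but not reached in this excerpt.
# A minimal sketch of how they might be applied once a classifier has been trained;
# clf, test_feature and test_label are assumed to exist and are not part of the original code:
scores = clf.decision_function(test_feature)
fpr, tpr, thresholds = roc_curve(test_label, scores)
roc_auc = auc(fpr, tpr)
acc = accuracy_score(test_label, clf.predict(test_feature))
plt.plot(fpr, tpr, label='AUC = %0.3f' % roc_auc)
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
plt.legend(loc='lower right')
plt.show()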
if not isLongest:
    init_job = ShortestPathIter(args=[graph, '--source', source, '--destination', destination,
                                      '--weighted', weighted, '-r', mode,
                                      '--output-dir', 'hdfs:///user/leiyang/out'])
    iter_job = ShortestPathIter(args=['hdfs:///user/leiyang/in/part*', '--source', source,
                                      '--destination', destination, '--weighted', weighted,
                                      '-r', mode, '--output-dir', 'hdfs:///user/leiyang/out'])
else:
    init_job = LongestPathIter(args=[graph, '--source', source, '--weighted', weighted,
                                     '-r', mode, '--output-dir', 'hdfs:///user/leiyang/out'])
    iter_job = LongestPathIter(args=['hdfs:///user/leiyang/in/part*', '--source', source,
                                     '--weighted', weighted, '-r', mode,
                                     '--output-dir', 'hdfs:///user/leiyang/out'])

if isLongest:
    path_job = getLongestDistance(args=['hdfs:///user/leiyang/out/part*', '-r', mode])
else:
    path_job = getPath(args=['hdfs:///user/leiyang/out/part*', '--destination', destination,
                             '-r', mode])

if isWeighted or isLongest:
    stop_job = isTraverseCompleted(args=['hdfs:///user/leiyang/out/part*',
                                         'hdfs:///user/leiyang/in/part*', '-r', mode])
else:
    stop_job = isDestinationReached(args=['hdfs:///user/leiyang/out/part*',
                                          '--destination', destination, '-r', mode])

# run initialization job
with init_job.make_runner() as runner:
    print str(datetime.datetime.now()) + ': starting initialization job ...'
    runner.run()

# move the result to input folder
print str(datetime.datetime.now()) + ': moving results for next iteration ...'
call(['hdfs', 'dfs', '-mv', '/user/leiyang/out', '/user/leiyang/in'])

# run BFS iteratively
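# The iterative phase itself is not shown above. A sketch of one way it could be driven,
# re-running iter_job and checking stop_job after each pass; the iteration cap, the HDFS
# folder rotation, and the use of runner.stream_output() / parse_output_line() are
# assumptions about the mrjob version, not the original driver code:
max_iterations = 20
for it in range(max_iterations):
    print str(datetime.datetime.now()) + ': starting iteration %d ...' % (it + 1)
    with iter_job.make_runner() as runner:
        runner.run()

    # check whether the traversal has finished (assuming stop_job emits a truthy value when done)
    done = False
    with stop_job.make_runner() as runner:
        runner.run()
        for line in runner.stream_output():
            key, value = stop_job.parse_output_line(line)
            if value:
                done = True
    if done:
        break

    # rotate the output into the input folder for the next pass
    call(['hdfs', 'dfs', '-rm', '-r', '/user/leiyang/in'])
    call(['hdfs', 'dfs', '-mv', '/user/leiyang/out', '/user/leiyang/in'])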
# cattles = {'811': '811'}
# VA = {'817': '57.08'}
day = 19
# day = [6, 5, 18, 2]
# day = [22]
low = 9
high = 48
VedioFlag = True  # used to check whether the folder contains 'avi' files or just pictures

for llx in cattles:
    #pathNameGroup = getPath('/Volumes/WA03-1/WA03 2016 4/' + str(llx), 11, [10, 18, 21], [16, 43, 34])
    #pathNameGroup = getPath.getPath('/Volumes/WA03-1/WA03 2016 5/' + str(llx), 9, [10, 18, 21], [16, 43, 34])
    #pathNameGroup = getPath.getPath('/Volumes/WA02-4/finalset WA02 2016 1 34013s/' + llx, 19, [0, 0, 0], [0, 0, 0])
    pathNameGroup = getPath.getPath(input_path + llx, day, low, high)
    #pathNameGroup = getPath('/Volumes/WA03-1/WA03 2016 1/833', 19, [10, 18, 21], [16, 43, 34])
    #print(pathNameGroup)
    for count in range(len(pathNameGroup)):
        #for count in range(0, 1):
        pathNameBack = pathNameGroup[count]
        #pathName = '/Volumes/WA03-1/WA03 2016 1/833/' + pathNameBack
        #pathName = '/Volumes/WA03-1/WA03 2016 4/' + str(llx) + '/' + pathNameBack
        #pathName = '/Volumes/WA03-1/WA03 2016 5/' + str(llx) + '/' + pathNameBack
        #pathName = '/Volumes/WA02-4/finalset WA02 2016 1 34013s/' + llx + '/' + pathNameBack
        pathName = input_path + llx + '/' + pathNameBack
        #print(pathName)
@author: Administrator
"""
import numpy as np
import getPath
import space
"""import boundingbox
import colorDistribution
import hueCount
import blur
import contrast
import bright"""

root1 = 'E:/ImageDataset_CUHK/test'
root2 = 'E:/ImageDataset_AVA/test'
AUHK_test, counts_test = getPath.getPath(root1)
np.save('E:/path/CUHK_test.npy', AUHK_test)
AVA_test, counts_test = getPath.getPath(root2)
np.save('E:/path/AVA_test.npy', AVA_test)

"""
root = 'E:/ImageDataset_CUHK/test'
root_train = 'E:/ImageDataset/train/'
root_test = 'E:/ImageDataset/test/'
paths_train, counts_train = getPath.getPath(root_train)
paths_test, counts_test = getPath.getPath(root_test)
print counts_train
print counts_test"""

"""
qedge_train = space.calculateQuality(paths_train)  # spatial distribution of edges feature
qedge_test = space.calculateQuality(paths_test)
def test(request):
    # Look up a path between the requested start and end nodes and render it.
    start = request.POST['start']
    end = request.POST['end']
    l = getPath.getPath(start, end)
    return render_to_response('index/example2.html', {'nodelist': l},
                              context_instance=RequestContext(request))
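# A view like test() is only reachable once it is wired into the URL configuration. A minimal
# sketch of a matching urls.py entry for the old-style Django used here (render_to_response /
# RequestContext); the URL pattern, name, and module path are assumptions:
from django.conf.urls import url
import views

urlpatterns = [
    url(r'^path/$', views.test, name='path'),
]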
    V = V / 255.0
    hist = np.zeros((20, 1))
    # Count hue values only for pixels that are sufficiently saturated and neither too dark nor too bright.
    for i in range(height):
        for j in range(width):
            if S[i][j] > 0.2 and V[i][j] >= 0.15 and V[i][j] <= 0.95:
                k = H[i][j] / 18
                hist[k][0] = hist[k][0] + 1
    return hist


def hueCount(paths):
    # Hue-count feature: 20 minus the number of dominant hue bins.
    q_hue = np.array([])
    for path in paths:
        print 'processing ' + path
        image = cv2.imread(path)
        hist = calcHist(image)
        m = hist.max()
        N = sum(hist > alp * m)  # alp is the hue-dominance threshold (defined above this excerpt)
        qh = 20 - N
        q_hue = np.append(q_hue, qh)
    return q_hue


root_train = 'E:/ImageDataset/train'
root_test = 'E:/ImageDataset/test'
#paths_train, counts_train = getPath.getPath(root_train)
paths_test, counts_test = getPath.getPath(root_test)
#qhue_train = hueCount(paths_train)
#np.save('E:/featureData/train/qhue_train.npy', qhue_train)
qhue_test = hueCount(paths_test)
np.save('E:/featureData/test/qhue_test.npy', qhue_test)
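# The calcHist excerpt above begins after H, S, V, height and width have already been prepared.
# A minimal sketch of what that preparation might look like, assuming the image is converted to
# HSV with OpenCV and split into channels; this is an assumption, not the original code:
def calcHist_head_sketch(image):
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    H, S, V = cv2.split(hsv)      # H in [0, 179], S and V in [0, 255] for 8-bit images
    height, width = H.shape
    S = S / 255.0                 # the excerpt then rescales V the same way
    return H, S, V, height, width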