from django.http import HttpResponse


def downloadPage(request):
    if request.method == "POST":
        # Cache key combines the client address with the upload-progress id.
        cacheKey = "%s_%s" % (request.META['REMOTE_ADDR'],
                              request.GET.get("X-Progress-ID"))
        if request.FILES.get('data-file'):
            return utils.generateData(request.FILES.get('data-file'),
                                      request.POST.get('app'),
                                      request.POST.get('appID'),
                                      "file", cacheKey)
        elif request.POST.get("data-url"):
            return utils.generateData(request.POST.get("data-url"),
                                      request.POST.get('app'),
                                      request.POST.get('appID'),
                                      "url", cacheKey)
        else:
            return HttpResponse(status=400)  # neither a file nor a URL supplied
    else:
        return HttpResponse(status=405)  # only POST is allowed
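
# A minimal sketch of exercising the view above with Django's test client.
# The URL path "/download/" is an assumption; use whatever the project's
# urls.py actually maps to downloadPage.
from django.test import Client

client = Client()
response = client.post(
    "/download/?X-Progress-ID=abc123",            # hypothetical progress id
    {"data-url": "http://example.com/data.csv",   # hypothetical source URL
     "app": "demo", "appID": "1"},
)
print(response.status_code)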
def example():
    # import necessary modules to run example
    from utils import generateData
    import matplotlib.pyplot as plt

    # generate data
    num_points = 100
    X, y = generateData(num_points, 2, 'bull')

    # perform kmeans on raw data
    L, C = kplusplus(X.T, 2)
    print('labels: ', L, '\nCenters: \n', C)

    # display results
    plt.figure(1)
    plt.subplot(121, aspect='equal')
    plt.plot(X[y == +1, 0], X[y == +1, 1], linestyle='None', marker='x',
             color='r', label='Generated +1 labels')
    plt.plot(X[y == -1, 0], X[y == -1, 1], linestyle='None', marker='x',
             color='b', label='Generated -1 labels')
    plt.legend(loc=4, fontsize='x-small', numpoints=1)
    plt.title('Raw Data')
    plt.axis([-0.5, 1.5, -0.5, 1.5])
    plt.subplot(122, aspect='equal')
    plt.plot(X[L == 0, 0], X[L == 0, 1], linestyle='None', marker='o',
             color='r', label='Group 1 cluster')
    plt.plot(X[L == 1, 0], X[L == 1, 1], linestyle='None', marker='o',
             color='b', label='Group 2 cluster')
    plt.legend(loc=4, fontsize='x-small', numpoints=1)
    plt.title('Clustered Data')
    plt.show()
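
# kplusplus above is presumably a k-means++ implementation; its source is not
# shown. A minimal sketch of the k-means++ seeding step for context (an
# illustration, not the actual kplusplus used above):
import numpy as np

def kpp_init(X, k, rng=np.random.default_rng()):
    # X: (n_points, n_dims). Pick the first centre uniformly at random, then
    # each subsequent centre with probability proportional to the squared
    # distance to the nearest centre chosen so far.
    centers = [X[rng.integers(len(X))]]
    for _ in range(k - 1):
        d2 = np.min([np.sum((X - c) ** 2, axis=1) for c in centers], axis=0)
        centers.append(X[rng.choice(len(X), p=d2 / d2.sum())])
    return np.array(centers)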
def testSpeedWithLib():
    numDatas = [1000, 10000, 20000, 30000, 40000, 50000, 60000, 70000,
                80000, 90000, 100000, 200000, 400000, 600000, 800000, 1000000]
    fileBackup = 'compareLibApyori/backup.json'
    _, items = loadBaseDataSet()
    target = []
    optimizedLib = []
    for numData in numDatas:
        transactions = generateData(items, numData)
        print(f'Running with {len(transactions)}-dataset'.center(100, ' '))
        min_support = 0.02
        min_confidence = 0.1
        min_lift = 0.0

        # Time the target implementation.
        start1 = time.time()
        result1 = list(targetApriori(transactions=transactions,
                                     items=items,
                                     min_confidence=min_confidence,
                                     min_support=min_support,
                                     min_lift=min_lift,
                                     numReduce=5))
        end1 = time.time()
        target.append(end1 - start1)

        # Time the library-based implementation.
        start2 = time.time()
        result2 = list(pyLibOptimizedApriori(transactions,
                                             min_confidence=min_confidence,
                                             min_support=min_support,
                                             min_lift=min_lift))
        end2 = time.time()
        optimizedLib.append(end2 - start2)

        print('Backing up...')
        backup(fileBackup, numDatas, target, optimizedLib)

    labels = ['target', 'optimizedLib']
    labelD = {'y': 'Time (s)', 'x': 'Num Transactions'}
    drawPlot(numDatas, [target, optimizedLib], labels,
             labelD).savefig('compareLibApyori/result.png')
    print('Done!'.center(100, ' '))
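
# pyLibOptimizedApriori above is presumably a thin wrapper around the apyori
# package (the directory name 'compareLibApyori' suggests as much); a sketch
# of what such a wrapper could look like -- an assumption, not the project's
# actual code:
from apyori import apriori

def pyLibOptimizedApriori_sketch(transactions, min_confidence, min_support, min_lift):
    # apyori.apriori returns a generator of RelationRecord results.
    return apriori(transactions,
                   min_support=min_support,
                   min_confidence=min_confidence,
                   min_lift=min_lift)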
def testSpeedWithNormal():
    fileBackup = 'compareNormal/backup.json'
    _, items = loadBaseDataSet()
    numDatas = [1000, 1500, 2000, 2500, 3000, 4000, 5000]
    target = []
    purePriori = []
    for numData in numDatas:
        transactions = generateData(items, numData)
        print(f'Running with {len(transactions)}-dataset'.center(100, ' '))
        min_support = 0.02
        min_confidence = 0.1
        min_lift = 0.0

        # Time the target implementation.
        start1 = time.time()
        result1 = list(targetApriori(transactions=transactions,
                                     items=items,
                                     min_confidence=min_confidence,
                                     min_support=min_support,
                                     min_lift=min_lift,
                                     numReduce=5))
        end1 = time.time()
        target.append(end1 - start1)

        # Time the plain apriori implementation.
        start2 = time.time()
        result2 = list(pureApriori(transactions, min_support))
        end2 = time.time()
        purePriori.append(end2 - start2)

        print('Backing up...')
        backup(fileBackup, numDatas, target, purePriori)

    labels = ['target', 'pureFunction']
    labelD = {'y': 'Time (s)', 'x': 'Num Transactions'}
    drawPlot(numDatas, [target, purePriori], labels,
             labelD).savefig('compareNormal/result.png')
    print('Done!'.center(100, ' '))
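
# The backup and drawPlot helpers used by both benchmarks above are not shown
# in this section; minimal sketches of plausible implementations follow
# (assumptions, not the project's actual code):
import json
import matplotlib.pyplot as plt

def backup_sketch(path, numDatas, *series):
    # Persist raw timings after each round so a crashed run keeps its results.
    with open(path, 'w') as f:
        json.dump({'numDatas': numDatas, 'series': list(series)}, f)

def drawPlot_sketch(xs, ys_list, labels, labelD):
    # One line per implementation; the caller chains .savefig(...) on the result.
    fig, ax = plt.subplots()
    for ys, label in zip(ys_list, labels):
        ax.plot(xs, ys, marker='o', label=label)
    ax.set_xlabel(labelD['x'])
    ax.set_ylabel(labelD['y'])
    ax.legend()
    return fig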
import numpy as np
import matplotlib.pyplot as plt


# The fragment began mid-function; the def line is assumed to match the
# generateData signature shown later in this section.
def generateData(f, g, Q=1, R=1, x0=0, T=100):
    x = np.zeros(T)
    y = np.zeros(T)
    q = np.sqrt(Q)
    r = np.sqrt(R)
    x[0] = x0  # deterministic initial state
    y[0] = g(x[0]) + r * np.random.normal(size=1)
    for t in range(1, T):
        x[t] = f(x[t - 1], t - 1) + q * np.random.normal(size=1)
        y[t] = g(x[t]) + r * np.random.normal(size=1)
    return x, y


R = 1.
Q = 0.1
T = 100  # data record length (assumed; not set in the original fragment)
f = stateTransFunc
g = transferFunc

# Generate data
x, y = utils.generateData(R=R, Q=Q, T=T)

sis = SequentialImportanceSampling(f, g, Q=Q, R=R, x0=0)
sis.generateWeightedParticles(y)

# Histograms of the normalised particle weights at three time steps.
fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
ax1.hist(sis.normalisedWeights[:, 1])
ax2.hist(sis.normalisedWeights[:, 10])
ax3.hist(sis.normalisedWeights[:, 50])
# plt.plot(range(N), sis.normalisedWeights[:, 10])
plt.show()
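
# Added diagnostic (not in the original snippet): the effective sample size
# ESS = 1 / sum_i w_i^2 of the normalised weights quantifies the weight
# degeneracy that the three histograms above visualise.
for t in (1, 10, 50):
    w = sis.normalisedWeights[:, t]
    print(f"t={t}: ESS = {1.0 / np.sum(w ** 2):.1f} of {len(w)} particles")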
# Z_ScoreNorm(trainData, "training/features_audio/arousal/trainArousal_norm.pkl")
# Z_ScoreNorm(validData+testData, "training/features_audio/arousal/devArousal_norm.pkl")
# print("Done!")
# Normalizs(trainData, "trainArousal_normed.pkl")
# Normalizs(validData+testData, "devArousal_normed.pkl")
# --------------------------------------------------------
# durations = [0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9, 0.95]
# n_skips = [100, 125, 150]
durations = [0.6]
n_skips = [150]
for duration in durations:
    for n_skip in n_skips:
        print(duration, n_skip)
        shuffled_trainData = generateData(trainData, duration, n_skip)
        n_sample = len(shuffled_trainData)
        filename = ("avec_data/training/features_audio/valence/generated_train_"
                    + str(duration) + "_" + str(n_skip) + "_" + str(n_sample) + ".pkl")
        if os.path.exists(filename):
            print("File exists. Continuing...")
            continue
        with open(filename, 'wb') as f:
            pickle.dump(shuffled_trainData, f)
# ----------------------------------------------------------------------
# shuffled_trainData = generateData(validData+testData, duration, n_skip)
# n_sample = len(shuffled_trainData)
# print(n_sample)
# with open("data/AVEC2015/training/features_video_appearance/arousal/generated_dev_"+str(duration)+"_"+str(
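
# To consume a file produced by the loop above, reload it with pickle
# (filename as constructed in the loop; an illustrative addition):
with open(filename, 'rb') as f:
    reloaded = pickle.load(f)
print(len(reloaded), "samples reloaded")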
from gmm1d import GMM1D
from gmm import GMM
from utils import generateData

if __name__ == "__main__":
    # 1D data
    X1 = generateData((400, 1), seed=42)
    gmm = GMM1D(X1, K=2)
    gmm.fit(verbose=True, plot=False)
    # gmm.plot()

    # ND data
    X2 = generateData((400, 2), seed=42)
    g = GMM(X2)
    g.fit()
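
# generateData here comes from the project's utils module, which is not shown.
# A minimal stand-in with the same (shape, seed) interface that returns samples
# from a two-component Gaussian mixture -- an assumption, not the project's
# actual implementation:
import numpy as np

def generateData_sketch(shape, seed=None):
    n, d = shape
    rng = np.random.default_rng(seed)
    # Two well-separated Gaussian blobs, half the samples each.
    means = rng.normal(scale=5.0, size=(2, d))
    X = np.vstack([rng.normal(loc=m, scale=1.0, size=(n // 2, d)) for m in means])
    rng.shuffle(X)
    return X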
mdir = '/media/dhingratul/Storage-unix/Dataset/LFW/'
lfw = 'lfw-deepfunneled/'
ann1 = 'annotations/male_names.txt'
ann2 = 'annotations/female_names.txt'
# .to_numpy() replaces the deprecated DataFrame.as_matrix()
male = pd.read_table(mdir + ann1, header=None).to_numpy().tolist()
male_set = set([item for sublist in male for item in sublist])
female = pd.read_table(mdir + ann2, header=None).to_numpy().tolist()
female_set = set([item for sublist in female for item in sublist])
files = os.listdir(mdir + lfw)

# Model Selection
model = 1  # PCA
genData = False
if model == 0:  # Deep Learning based
    if genData is True:
        X, y = utils.generateData(mdir, lfw, files, male_set, female_set)
        # Pickle
        Y = np.array(y)
        pickle.dump((X, Y), open("../model/data.p", "wb"))
        # SVM
        X_train, X_test, y_train, y_test = train_test_split(
            X, Y, test_size=0.1, random_state=0)
        clf = SVC()
        clf.fit(X_train, y_train)
        y_pred = clf.predict(X_test)
        sc = clf.score(X_test, y_test)
        print("Score for model {} is {}".format(model, sc))
        joblib.dump(clf, '../model/svm_model.pkl')
    else:
        # Read Pickle
        X, Y = pickle.load(open("../model/data.p", "rb"))
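
# Reloading the persisted classifier for inference (path as used above);
# assumes the same joblib import as in the training branch:
clf = joblib.load('../model/svm_model.pkl')
# y_pred = clf.predict(X_new)  # X_new: features shaped like X above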
        x_ref = self.sampleStateTrajectory()
        return x_ref


def stateTransFunc(x, t=0):
    return .1 + np.sin(x)


def transferFunc(x):
    return x


def generateData(f=stateTransFunc, g=transferFunc, Q=1, R=1, x0=0, T=100):
    # Same generation scheme as the generateData shown earlier in this section.
    x = np.zeros(T)
    y = np.zeros(T)
    q = np.sqrt(Q)
    r = np.sqrt(R)
    x[0] = x0  # deterministic initial state
    y[0] = g(x[0]) + r * np.random.normal(size=1)
    for t in range(1, T):
        x[t] = f(x[t - 1], t - 1) + q * np.random.normal(size=1)
        y[t] = g(x[t]) + r * np.random.normal(size=1)
    return x, y


if __name__ == '__main__':
    # Set up some parameters
    N = 10  # Number of particles
    T = 21  # Length of data record
    f1 = stateTransFunc
    g1 = transferFunc
    # R = 1.
    # Q = 0.1
    x_ref = np.zeros(T)
    # Generate data
    x, y = generateData(R=1.0, Q=0.1, T=T)
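
# Quick smoke test of the generateData defined above (illustrative addition):
import numpy as np  # needed by generateData and the functions above

xs, ys = generateData(T=50)
print(xs.shape, ys.shape)  # expected: (50,) (50,)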