signal_dct_02.append(dct(signal[window * WINDOW_SIZE - WINDOW_DELAY:window * WINDOW_SIZE]))
signal_pca = PCA(dct(signal[window * WINDOW_SIZE - WINDOW_DELAY:window * WINDOW_SIZE]))

# Write the first three feature columns from the PCA object, split by activity.
for vector in zip(signal_pca.a.T[0], signal_pca.a.T[1], signal_pca.a.T[2]):
    if activity == 1:
        print >>ACTIVITY_01, vector[0], vector[1], vector[2]
    else:
        print >>ACTIVITY_02, vector[0], vector[1], vector[2]
    print >>training_output_file, str(vector[0]) + '\t' + str(vector[1]) + '\t' + str(vector[2]) + '\t' + str(signal_class)
    print >>testing_output_file, str(vector[0]) + '\t' + str(vector[1]) + '\t' + str(vector[2])

# FANCY STUFF: train a Gaussian-kernel SVM on the saved feature file.
mlx, mly = mlpy.data_fromfile('data_from_analyzer_01/train_data1.dat')
svm = mlpy.Svm(kernel='gaussian', C=0.9)
svm.compute(mlx, mly)

# Predict on the same file and split the vectors by predicted class.
mltx, mlty = mlpy.data_fromfile('data_from_analyzer_01/train_data1.dat')
for vector in mltx:
    if svm.predict(vector) == 1:
        print >>SVM_PREDICT_01, vector[0], vector[1], vector[2]
    else:
        print >>SVM_PREDICT_02, vector[0], vector[1], vector[2]

for vector in signal_dct_01:
    print >>SIGNAL_DCT_01, vector[0], vector[1], vector[2]
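# A minimal, self-contained sketch of the windowed DCT feature extraction used
# above. WINDOW_SIZE, WINDOW_DELAY, and the example signal are hypothetical
# values, and the DCT is assumed to come from scipy.fftpack.
import numpy as np
from scipy.fftpack import dct

WINDOW_SIZE = 128     # assumed samples per window
WINDOW_DELAY = 32     # assumed look-back into the previous window

signal = np.random.randn(WINDOW_SIZE * 10)   # stand-in for the recorded signal
signal_dct = []
for window in range(1, len(signal) // WINDOW_SIZE):
    chunk = signal[window * WINDOW_SIZE - WINDOW_DELAY:window * WINDOW_SIZE]
    signal_dct.append(dct(chunk))            # one DCT feature vector per window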
import numpy as np
import mlpy

xtr = np.array([[7.0, 2.0, 3.0, 1.0],   # first sample
                [1.0, 2.0, 3.0, 2.0],   # second sample
                [2.0, 2.0, 2.0, 1.0],   # third sample
                [2.0, 4.0, 2.0, 6.0],   # fourth sample
                [2.0, 2.0, 7.0, 9.0]])  # fifth sample
print xtr
print np.size(xtr), np.shape(xtr), np.ndim(xtr), xtr.dtype

ytr = np.array([1, 2, 3, 1, 2])         # class labels
print ytr
print np.size(ytr), np.shape(ytr), np.ndim(ytr), ytr.dtype

# Save the data to disk and read it back.
print mlpy.data_tofile('data_example.dat', xtr, ytr, sep=' ')
x, y = mlpy.data_fromfile('data_example.dat')
print x
print y
print "mlpy.data_normalize(x) = ", mlpy.data_normalize(x)

#mysvm = mlpy.Svm()             # initialize Svm class
myknn = mlpy.Knn(k=1)           # initialize Knn class
myfda = mlpy.Fda()              # initialize Fda class

#print mysvm.compute(xtr, ytr)  # compute SVM
print myknn.compute(xtr, ytr)   # compute KNN
print myfda.compute(xtr, ytr)   # compute FDA
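# A short follow-up sketch, continuing from the snippet above: once compute()
# has trained a model, mlpy's classifiers can label new samples with predict(),
# as the Svm usage in the first snippet shows. The test vector is made up for
# illustration.
xts = np.array([2.0, 2.0, 3.0, 1.0])   # hypothetical unseen sample
print myknn.predict(xts)                # nearest-neighbour class label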
        print count
        metadata = post['json']
        data = json.loads(str(metadata))
        newPost = getPost.parsePostData(data["records"][0])
        negativity = float(values[3])
        # Posts with more than 60% negativity are labelled 1, the rest 0.
        if negativity > 60.0:
            g = open("trainingData.txt", "a")
            g.write(str(postid) + "," + str(newPost.likeCount) + "," + str(newPost.commentCount) + "," + str(newPost.repostCount) + ",1\n")
            g.close()
        else:
            g = open("trainingData.txt", "a")
            g.write(str(postid) + "," + str(newPost.likeCount) + "," + str(newPost.commentCount) + "," + str(newPost.repostCount) + ",0\n")
            g.close()
    except Exception:
        continue

g.close()
f.close()

#writePostIdsToFile()
#writePostInformationToTheFile("fortyPercentOrMorePosts.txt")

x, y = mlpy.data_fromfile('data.dat')
print "done"
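# A minimal sketch of how the comma-separated trainingData.txt written above
# could be loaded and handed to mlpy. The column layout (postid, likeCount,
# commentCount, repostCount, label) follows the writes above; dropping the
# postid column, the -1/+1 label mapping, and the linear kernel are assumptions.
import numpy as np
import mlpy

rows = np.loadtxt("trainingData.txt", delimiter=",")
features = rows[:, 1:4]                      # likeCount, commentCount, repostCount
labels = np.where(rows[:, 4] > 0, 1, -1)     # assumed two-class label convention
svm = mlpy.Svm(kernel='linear')
svm.compute(features, labels)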