def analystTestData2():
    """Load 1000 test trajectories plus an id file and set up 3d drawing state."""
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()  # kept to mirror the original setup
    indices = []
    traces = []
    targets = []
    for _ in range(1000):
        idx, trace, target, _ = data.readTestFile()
        traces.append(trace)
        targets.append(target)
    # read integer ids until one exceeds 1000 (the sentinel that ends the list)
    with open('./data/BDC1236_20170607.txt', 'r') as fh:
        while True:
            idx = int(fh.readline())
            if idx > 1000:
                break
            indices.append(idx)
    indices = np.array(indices)
    print(indices)
    drawer = datadraw.DataDraw('3d')
    palette = drawer.getColorsValue()
    start = 10  # leftover from the original experiment; intentionally unused
def assemble():
    """Train a tanh MLP on assembled feature vectors and write test predictions."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    y = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getvector(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=1e-4, activation='tanh',
                        hidden_layer_sizes=(20, 10), random_state=1)
    trainer.train(clf, feats, y)
    trainer.testResultAll(data, getvector, savepath='./data/0623tmp.txt')
def notgetanalyst():
    """Visualise up to 10 trajectories whose ids are listed in ./data/notget.txt."""
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    y = data.train["labels"]
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    with open('./data/notget.txt', 'r') as fh:
        tokens = fh.read().split()
    wrong_idx = np.array(tokens, dtype='int') - 1  # ids in the file are 1-based
    wrong_traces = [traces[i] for i in wrong_idx]
    drawer = datadraw.DataDraw('3d')
    palette = drawer.getColorsValue()
    shown = 0
    # draw a reference trace first, then the first 10 "wrong" ones
    drawer.draw3dline(traces[0], c=palette[shown % 10])
    for trace in wrong_traces:
        shown += 1
        if shown > 10:
            break
        drawer.draw3dline(trace, c=palette[shown % 10])
    plt.show()
def analystTestData():
    """Scatter-plot test start points, highlighting the ids from the result file."""
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    flagged = []
    starts = []
    targets = []
    for _ in range(1000):
        idx, trace, target, _ = data.readTestFile()
        # clamp obviously broken start coordinates before plotting
        if trace[1][0] < 0:
            trace[1][0] = 2100
        if trace[0][0] > 1000:
            trace[0][0] -= 400
        starts.append([trace[0][0], trace[1][0]])
        targets.append(target)
    # ids until the >1000 sentinel
    with open('./data/BDC1236_20170607.txt', 'r') as fh:
        while True:
            idx = int(fh.readline())
            if idx > 1000:
                break
            flagged.append(idx)
    flagged = np.array(flagged) - 1  # ids in the file are 1-based
    starts = np.array(starts)
    drawer = datadraw.DataDraw('2d')
    drawer.drawbatchgoal(starts, c='b')
    drawer.drawbatchgoal(starts[flagged], c='r')
    plt.show()
def startendvector():
    """Train an SVC on the normalised (dx, dy, dt) between each trace's
    first and last sample.

    A trace `mouse` is [xs, ys, ts]; the feature is the end-minus-begin
    displacement divided by its Euclidean norm.
    """
    data = dataset.DataSet()
    data.getTrainData()
    drawer = datadraw.DataDraw('2d')
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]

    def feature(mouse, goal):
        # first and last sample of each of the x / y / t channels
        m = len(mouse[0])
        ex, ey, et = mouse[0][m - 1], mouse[1][m - 1], mouse[2][m - 1]
        bx, by, bt = mouse[0][0], mouse[1][0], mouse[2][0]
        if m > 1:
            # BUGFIX: the original summed the y-term twice and omitted the
            # time term: (bx-ex)**2 + (by-ey)**2 + (by-ey)**2
            norm = (bx - ex) ** 2 + (by - ey) ** 2 + (bt - et) ** 2
        else:
            # single-sample trace: measure the start point against the origin
            norm = bx ** 2 + by ** 2 + bt ** 2
            bx = by = bt = 0.0
        norm = norm ** 0.5
        if norm < 1e-3:  # degenerate trace: avoid division by zero
            norm = 1.0
        return [(ex - bx) / norm, (ey - by) / norm, (et - bt) / norm]

    feats = np.array([feature(traces[i], targets[i]) for i in range(count)])
    trainer = datadeal.DataTrain()
    clf = SVC()
    trainer.trainTest(clf, feats, labels)
def getAssembleResult():
    """Fit an SVC on assembled features and dump predictions for the test set."""
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    clf = SVC(C=0.55)
    y = data.train["labels"]
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getvector(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    trainer.train(clf, feats, y)
    trainer.testResultAll(data, getvector, savepath='./data/assemble.txt')
def axist():
    """Classify traces by their total elapsed time using an SVC.

    The single feature per trace is last-timestamp minus first-timestamp
    (0 for single-sample traces).
    """
    data = dataset.DataSet()
    data.getTrainData()
    drawer = datadraw.DataDraw('2d')
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]

    def duration(mouse, goal):
        # mouse[2] is the timestamp channel
        ts = mouse[2]
        if len(ts) == 1:
            return 0
        return ts[len(ts) - 1] - ts[0]

    # `float` replaces the removed numpy alias np.float (identical dtype)
    vec = np.array([duration(traces[i], targets[i]) for i in range(count)],
                   dtype=float)
    trainer = datadeal.DataTrain()
    clf = SVC()
    # reshape(-1, 1): one feature per sample — works for any dataset size,
    # not just the hard-coded 3000 of the original
    trainer.trainTest(clf, vec.reshape(-1, 1), labels)
def endgoalvector():
    """Train an SVC on the unit vector pointing from each trace's end point
    to its goal coordinate."""
    data = dataset.DataSet()
    data.getTrainData()
    drawer = datadraw.DataDraw('2d')
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]

    def feature(mouse, goal):
        m = len(mouse[0])
        ex, ey = mouse[0][m - 1], mouse[1][m - 1]
        gx, gy = goal[0], goal[1]
        norm = ((gx - ex) ** 2 + (gy - ey) ** 2) ** 0.5
        # ROBUSTNESS: the original divided by zero when the trace already
        # ends on the goal; guard exactly like startendvector() does
        if norm < 1e-3:
            norm = 1.0
        return [(gx - ex) / norm, (gy - ey) / norm]

    feats = np.array([feature(traces[i], targets[i]) for i in range(count)])
    trainer = datadeal.DataTrain()
    clf = SVC()
    trainer.trainTest(clf, feats, labels)
def assemble():
    """Cross-validate (or train + dump) an lbfgs MLP on assembled features."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getvector(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=1e-8, activation='logistic',
                        hidden_layer_sizes=(16, 16), random_state=0,
                        solver='lbfgs', max_iter=600)
    test = True  # flip to False to train on everything and write results
    if test:
        trainer.trainTest(clf, feats, labels)
    else:
        trainer.train(clf, feats, labels)
        trainer.testResultAll(data, getvector, savepath='./data/0626tmp.txt')
def demotest():
    """Smoke test: cross-validate an SVC on the raw mouse start positions."""
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    clf = SVC(C=1)
    X = data.getPosOfMouse(0)
    y = data.train["labels"]
    trainer.trainTest(clf, X, y)
def main():
    """Cross-validate a linear SVC on scaled features, skipping traces that
    flt_mac flags unless they fall in the whitelisted index ranges."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = []
    kept_labels = []
    kept_idx = []
    for i in range(count):
        if flt_mac(traces[i]) == True:
            # keep flagged traces only from [0, 2600) and [2700, 2800)
            if i not in range(2700, 2800) and i not in range(2600):
                continue
        kept_idx.append(i)
        feats.append(get_feats(1, traces[i], targets[i], 1))
        kept_labels.append(labels[i])
    feats = np.array(feats)
    kept_labels = np.array(kept_labels)
    feats = preprocessing.scale(feats)
    trainer = datadeal.DataTrain()
    clf = SVC(C=2.5, kernel="linear")
    trainer.trainTest(clf, feats, kept_labels, 10.0)
def main():
    """Cross-validate an SVC on scaled features, dropping traces flagged by
    get_xpn_mac inside the [2600, 2900) index range."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = []
    kept_labels = []
    kept_idx = []
    for i in range(count):
        if get_xpn_mac(traces[i]) == True:
            if i in range(2600, 2900):
                continue
        kept_idx.append(i)
        feats.append(get_feats(1, traces[i], targets[i], 1))
        kept_labels.append(labels[i])
    feats = np.array(feats)
    kept_labels = np.array(kept_labels)
    feats = preprocessing.scale(feats)
    trainer = datadeal.DataTrain()
    clf = SVC(C=1.5)
    trainer.trainTest(clf, feats, kept_labels, 10.0)
def main():
    """Scale + PCA(15) the feature matrix (keeping column 0 raw), train an
    lbfgs MLP, and write test results with the fitted scaler/pca."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getfeature(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    raw_feats = feats  # unscaled copy, used to fit the test-time scaler
    feats = preprocessing.scale(feats)
    # column 0 stays unscaled; the remaining columns are standardised
    feats = np.c_[raw_feats[:, 0], feats[:, 1:]]
    pca = PCA(n_components=15)
    pca.fit(feats)
    feats = pca.transform(feats)
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=0.9, activation='logistic',
                        hidden_layer_sizes=(15, 19), random_state=0,
                        solver='lbfgs', max_iter=250, early_stopping=True,
                        epsilon=1e-04)
    print(clf)
    test = False  # flip to True to cross-validate instead of writing results
    if test:
        trainer.trainTest(clf, feats, labels, 4.0)
    else:
        scaler = preprocessing.StandardScaler().fit(raw_feats)
        trainer.train(clf, feats, labels)
        trainer.testResultAll(data, getfeature, savepath='./data/0706tmp.txt',
                              stop=1200, scal=scaler, pca=pca)
def getReuslt3():
    """Train an MLP on the goal coordinates alone and dump test predictions.

    (The misspelled name is kept: it is the function's public interface.)
    """
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=1e-5, hidden_layer_sizes=(20), random_state=1)
    y = data.train["labels"]
    X = np.mat(data.train["goals"])
    trainer.train(clf, X, y)

    def goal_feature(idx, mouse, goal, label):
        # the data reader signals exhaustion by passing idx == False
        if idx == False:
            return False
        return np.array(goal).reshape([1, 2])

    trainer.testResultAll(data, goal_feature, savepath='./data/ann_goal.txt')
def main():
    """Scale + PCA(16) the features (appending one raw column), train an
    lbfgs MLP, and write test results with the fitted scaler/pca."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getfeature(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    raw_feats = feats  # unscaled copy, used to fit the test-time scaler
    feats = preprocessing.scale(feats)
    # scaled columns 1.. plus the raw second column appended at the end
    feats = np.c_[feats[:, 1:], raw_feats[:, 1]]
    pca = PCA(n_components=16)
    pca.fit(feats)
    feats = pca.transform(feats)
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=1e-6, activation='logistic',
                        hidden_layer_sizes=(16, 18), random_state=0,
                        solver='lbfgs', max_iter=800)
    test = False  # flip to True to cross-validate instead of writing results
    if test:
        trainer.trainTest(clf, feats, labels, 10.0)
    else:
        scaler = preprocessing.StandardScaler().fit(raw_feats)
        trainer.train(clf, feats, labels)
        trainer.testResultAll(data, getfeature, savepath='./data/0704tmp.txt',
                              stop=-1, scal=scaler, pca=pca)
def assemble():
    """Cross-validate an SVC on raw assembled feature rows."""
    data = dataset.DataSet()
    data.getTrainData()
    drawer = datadraw.DataDraw('2d')
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getvector(1, traces[i], targets[i], 1)
                      for i in range(count)])
    trainer = datadeal.DataTrain()
    clf = SVC()
    trainer.trainTest(clf, feats, labels)
def axisy():
    """Classify traces by how often the y-direction of motion changes sign.

    The single feature per trace counts sign flips between consecutive
    y-deltas (0 for single-sample traces).
    """
    data = dataset.DataSet()
    data.getTrainData()
    drawer = datadraw.DataDraw('2d')
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]

    def direction_flips(mouse, goal):
        ys = mouse[1]
        m = len(ys)
        if m == 1:
            return 0
        # BUGFIX: the original differenced mouse[1] in place, permanently
        # corrupting the shared training data; work on a copy instead.
        dy = list(ys)
        for i in range(m - 1, 0, -1):
            dy[i] = dy[i] - dy[i - 1]
        # dy[0] is (as in the original) the raw first y value, so the first
        # comparison is raw-y against first-delta
        changes = 0
        state = dy[0]
        for i in range(1, m):
            if state * dy[i] < 0:
                changes += 1
            state = dy[i]
        return changes

    # `float` replaces the removed numpy alias np.float (identical dtype)
    vec = np.array([direction_flips(traces[i], targets[i])
                    for i in range(count)], dtype=float)
    trainer = datadeal.DataTrain()
    clf = SVC()
    # reshape(-1, 1) generalises the original hard-coded reshape([3000, 1])
    trainer.trainTest(clf, vec.reshape(-1, 1), labels)
def getReuslt_mse():
    """Cross-validate an MLP on [x0, y0, std(y), std(t)] per trace.

    Everything after the exit() call is dead code retained from an earlier
    train-and-dump experiment.
    """
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    traces = data.train["mouses"]
    labels = data.train["labels"]

    def build_features(mouse_list):
        # start x, start y, spread of y, spread of t
        return np.array([[m[0][0], m[1][0], np.std(m[1]), np.std(m[2])]
                         for m in mouse_list])

    stats = build_features(traces)
    clf = MLPClassifier(alpha=1e-3, hidden_layer_sizes=(40), random_state=1)
    y = data.train["labels"]
    X = np.mat(stats)
    trainer.trainTest(clf, X, y)
    exit()
    # --- unreachable below: kept from the earlier experiment ---
    trainer.train(clf, X, y)

    def feature(idx, mouse, goal, label):
        if idx == False:
            return False
        row = [mouse[0][0], mouse[1][0], np.std(mouse[1]), np.std(mouse[2])]
        return np.array(row).reshape([1, 4])

    trainer.testResultAll(data, feature, savepath='./data/xytmse0605.txt')
def maintrain():
    """Cross-validate an SVC on FFT features of traces rejected by get_X_PN,
    printing the 2-class confusion matrix."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = []
    kept_labels = []
    for i in range(count):
        flag = get_X_PN(traces[i])
        if flag == False:
            feats.append(getfft(traces[i]))
            kept_labels.append(labels[i])
    feats = np.array(feats)
    kept_labels = np.array(kept_labels)
    trainer = datadeal.DataTrain()
    clf = SVC(C=0.2)
    # integer formatting so the confusion matrix prints cleanly
    np.set_printoptions(formatter={'float': lambda x: "%d" % float(x)})
    confusion = trainer.trainTest(clf, feats, kept_labels, 4.0, classn=2,
                                  returnconfusion=True)
def assemble():
    """Debug run: build features, print a few representative rows, then exit
    before training. The train/test code below exit() is dead."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getvector(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=1e-6, activation='logistic',
                        hidden_layer_sizes=(20, 20), random_state=0,
                        solver='lbfgs', max_iter=1000)
    # spot-check a few human (< 2600) and machine (>= 2600) rows
    for row in (0, 10, 1000, 2700, 2800, 2900):
        print(feats[row])
    exit()
    # --- unreachable below: kept from the pre-debug version ---
    test = True
    if test == False:
        trainer.trainTest(clf, feats, labels)
    else:
        trainer.train(clf, feats, labels)
        trainer.testResultAll(data, getvector, savepath='./data/0624tmp.txt')
def assemble():
    """Train an MLP on assembled features and write predictions to 0619tmp.txt."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getvector(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=1e-2, hidden_layer_sizes=(20, 10),
                        random_state=1)
    trainer.train(clf, feats, labels)
    trainer.testResultAll(data, getvector, savepath='./data/0619tmp.txt')
def assemble():
    """Debug run: print feature slices and the zero-feature count, then exit.

    NOTE(review): everything after exit(0) is dead, and `clf` is never
    defined there (all classifier lines were commented out) — it would
    raise NameError if ever reached.
    """
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = np.array([getfeature(1, traces[i], targets[i], 1)[0]
                      for i in range(count)])
    print(feats[0:50])
    print(feats[2700:2710])
    print(feats[2800:2830])
    trainer = datadeal.DataTrain()
    zero_count = 0
    for value in feats:
        if value == 0:
            zero_count += 1
    print(zero_count)
    exit(0)
    # --- unreachable below: clf is undefined here ---
    test = True
    if test == True:
        trainer.trainTest(clf, feats, labels)
    else:
        trainer.train(clf, feats, labels)
        trainer.testResultAll(data, getfeature, savepath='./data/0629tmp.txt')
def getResult2():
    """Train an SVC on each trace's starting (x, y) and dump test predictions."""
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    clf = SVC(C=1.5)
    y = data.train["labels"]
    traces = data.train["mouses"]
    X = np.mat([[traces[i][0][0], traces[i][1][0]]
                for i in range(data.train["size"])])
    trainer.train(clf, X, y)

    def start_feature(idx, mouse, goal, label):
        # the data reader signals exhaustion by passing idx == False
        if idx == False:
            return False
        return np.array([mouse[0][0], mouse[1][0]]).reshape([1, 2])

    trainer.testResultAll(data, start_feature,
                          savepath='./data/svm_mouse_start.txt')
def getReulst1():
    """Train an MLP on each trace's starting (x, y) and dump test predictions.

    (The misspelled name is kept: it is the function's public interface.)
    """
    data = dataset.DataSet()
    data.getTrainData()
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=1e-5, hidden_layer_sizes=(10), random_state=1)
    y = data.train["labels"]
    traces = data.train["mouses"]
    X = np.mat([[traces[i][0][0], traces[i][1][0]]
                for i in range(data.train["size"])])
    trainer.train(clf, X, y)

    def start_feature(idx, mouse, goal, label):
        # the data reader signals exhaustion by passing idx == False
        if idx == False:
            return False
        return np.array([mouse[0][0], mouse[1][0]]).reshape([1, 2])

    trainer.testResultAll(data, start_feature,
                          savepath='./data/ann_mouse_start.txt')
def maintest():
    """Fit the main MLP (machine sub-classes) and the line MLP, then hand
    both plus their scalers to testResult via a config dict."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    main_feats = []
    main_labels = []
    for i in range(count):
        row = getfeature(i, traces[i], targets[i], 0, use_all=True)
        if type(row) is bool:  # extractor rejected this trace
            continue
        if i in range(0, 2600):  # humans are excluded from the main net
            continue
        # relabel the machine block into two sub-classes
        if i in range(2600, 2650):
            main_labels.append(1)
        elif i in range(2650, 2700):
            main_labels.append(2)
        elif i in range(2700, 2800):
            main_labels.append(1)
        elif i in range(2800, 2900):
            main_labels.append(2)
        else:
            main_labels.append(1)
        main_feats.append(row)
    line_feats = []
    line_labels = []
    for i in range(count):
        row = getfeature2(i, traces[i], targets[i], 0, use_all=False)
        if type(row) is bool:
            continue
        line_labels.append(labels[i])
        line_feats.append(row)
    main_labels = np.array(main_labels)
    main_feats = np.array(main_feats)
    line_labels = np.array(line_labels)
    line_feats = np.array(line_feats)
    raw_main = main_feats   # unscaled copies for the test-time scalers
    raw_line = line_feats
    np.set_printoptions(formatter={'float': lambda x: "%5.3f" % float(x)})
    main_feats = preprocessing.scale(main_feats)
    line_feats = preprocessing.scale(line_feats)
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=0.5, activation='logistic',
                        hidden_layer_sizes=(11, 11), random_state=0,
                        solver='lbfgs', max_iter=250, early_stopping=True,
                        epsilon=1e-04)
    clf2 = MLPClassifier(alpha=0.2, activation='logistic',
                         hidden_layer_sizes=(11, 11), random_state=0,
                         solver='lbfgs', max_iter=250, early_stopping=True,
                         epsilon=1e-04)
    clf.fit(main_feats, main_labels)
    clf2.fit(line_feats, line_labels)
    scaler = preprocessing.StandardScaler().fit(raw_main)
    scaler2 = preprocessing.StandardScaler().fit(raw_line)
    config = {
        "scaler": scaler,
        "clf": clf,
        "pca": '',
        "savepath": './data/18b/',
        "stop": -1,
        "scaler_line": scaler2,
        "clf_line": clf2,
    }
    testResult(config)
def maintrain():
    """Cross-validate the line classifier (clf2) on scaled getfeature2
    features and soften the resulting confusion matrix for plotting."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    main_feats = []
    main_labels = []
    for i in range(count):
        row = getfeature(i, traces[i], targets[i], 0, use_all=True)
        if type(row) is bool:  # extractor rejected this trace
            continue
        if i in range(0, 2600):  # humans are excluded from the main set
            continue
        # relabel the machine block into two sub-classes
        if i in range(2600, 2650):
            main_labels.append(1)
        elif i in range(2650, 2700):
            main_labels.append(2)
        elif i in range(2700, 2800):
            main_labels.append(1)
        elif i in range(2800, 2900):
            main_labels.append(2)
        else:
            main_labels.append(1)
        main_feats.append(row)
    line_feats = []
    line_labels = []
    for i in range(count):
        row = getfeature2(i, traces[i], targets[i], 0, use_all=False)
        if type(row) is bool:
            continue
        line_labels.append(labels[i])
        line_feats.append(row)
    line_labels = np.array(line_labels)
    line_feats = np.array(line_feats)
    main_labels = np.array(main_labels)
    main_feats = np.array(main_feats)
    raw_main = main_feats  # kept unscaled, as in the original
    line_feats = preprocessing.scale(line_feats)
    trainer = datadeal.DataTrain()
    clf2 = MLPClassifier(alpha=0.2, activation='logistic',
                         hidden_layer_sizes=(11, 11), random_state=0,
                         solver='lbfgs', max_iter=250, early_stopping=True,
                         epsilon=1e-04)
    # integer formatting so the confusion matrix prints cleanly
    np.set_printoptions(formatter={'float': lambda x: "%d" % float(x)})
    confusion = trainer.trainTest(clf2, line_feats, line_labels, 4.0,
                                  classn=2, returnconfusion=True)
    # compress the dynamic range before any visualisation
    confusion = confusion**0.1
def maintest():
    """Fit the main and line MLPs (plus an unfitted PCA placeholder) and run
    testResultAll with both classifiers and scalers."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    main_feats = []
    main_labels = []
    for i in range(count):
        row = getfeature(i, traces[i], targets[i], 0, use_all=True)
        if type(row) is bool:  # extractor rejected this trace
            continue
        if i in range(0, 2600):  # humans are excluded from the main net
            continue
        # relabel the machine block into two sub-classes
        if i in range(2600, 2650):
            main_labels.append(1)
        elif i in range(2650, 2700):
            main_labels.append(2)
        elif i in range(2700, 2800):
            main_labels.append(1)
        elif i in range(2800, 2900):
            main_labels.append(2)
        else:
            main_labels.append(1)
        main_feats.append(row)
    line_feats = []
    line_labels = []
    for i in range(count):
        row = getfeature2(i, traces[i], targets[i], 0, use_all=False)
        if type(row) is bool:
            continue
        line_labels.append(labels[i])
        line_feats.append(row)
    line_labels = np.array(line_labels)
    line_feats = np.array(line_feats)
    main_labels = np.array(main_labels)
    main_feats = np.array(main_feats)
    raw_main = main_feats   # unscaled copies for the test-time scalers
    raw_line = line_feats
    np.set_printoptions(formatter={'float': lambda x: "%5.3f" % float(x)})
    main_feats = preprocessing.scale(main_feats)
    line_feats = preprocessing.scale(line_feats)
    pca = PCA(n_components=9)  # constructed but never fitted (as in original)
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=0.5, activation='logistic',
                        hidden_layer_sizes=(11, 11), random_state=0,
                        solver='lbfgs', max_iter=250, early_stopping=True,
                        epsilon=1e-04)
    clf2 = MLPClassifier(alpha=0.2, activation='logistic',
                         hidden_layer_sizes=(11, 11), random_state=0,
                         solver='lbfgs', max_iter=250, early_stopping=True,
                         epsilon=1e-04)
    clf.fit(main_feats, main_labels)
    clf2.fit(line_feats, line_labels)
    scaler = preprocessing.StandardScaler().fit(raw_main)
    scaler2 = preprocessing.StandardScaler().fit(raw_line)
    testResultAll(clf, scaler, pca, savepath='./data/15/07tmp.txt', stop=-1,
                  clf_extra=clf2, scaler_extra=scaler2)
def maintest():
    """Fit four specialist classifiers (main, line, x437, sharp) and run
    testResult with all of them wired into one config dict."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    main_feats = []
    main_labels = []
    for i in range(count):
        row = getfeature(i, traces[i], targets[i], 0)
        if i in range(0, 2600):  # humans are excluded from the main net
            continue
        # relabel the machine block into two sub-classes
        if i in range(2600, 2650):
            main_labels.append(1)
        elif i in range(2650, 2700):
            main_labels.append(2)
        elif i in range(2700, 2800):
            main_labels.append(1)
        elif i in range(2800, 2900):
            main_labels.append(2)
        else:
            main_labels.append(1)
        main_feats.append(row)
    line_feats = []
    line_labels = []
    for i in range(count):
        if get_X_PN(traces[i]) == True:
            continue
        line_feats.append(getfeature2(i, traces[i], targets[i], 0))
        line_labels.append(labels[i])
    x437_feats = []
    x437_labels = []
    sharp_feats = []
    sharp_labels = []
    for i in range(count):
        xpn = get_X_PN(traces[i])
        # x437 net: traces starting at x >= 437 that get_X_PN rejects
        if traces[i][0][0] >= 437 and xpn == False:
            x437_feats.append(getf(traces[i]))
            x437_labels.append(labels[i])
        # sharp net: traces containing a sharp angle
        if get_sharp_angle(traces[i]) == True:
            sharp_feats.append(getft(traces[i]))
            sharp_labels.append(labels[i])
    main_labels = np.array(main_labels)
    main_feats = np.array(main_feats)
    line_labels = np.array(line_labels)
    line_feats = np.array(line_feats)
    x437_labels = np.array(x437_labels)
    x437_feats = np.array(x437_feats)
    sharp_labels = np.array(sharp_labels)
    sharp_feats = np.array(sharp_feats)
    raw_main = main_feats   # unscaled copies for the test-time scalers
    raw_line = line_feats
    raw_x437 = x437_feats
    raw_sharp = sharp_feats
    np.set_printoptions(formatter={'float': lambda x: "%5.3f" % float(x)})
    main_feats = preprocessing.scale(main_feats)
    line_feats = preprocessing.scale(line_feats)
    x437_feats = preprocessing.scale(x437_feats)
    sharp_feats = preprocessing.scale(sharp_feats)
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=0.5, activation='logistic',
                        hidden_layer_sizes=(11, 11), random_state=0,
                        solver='lbfgs', max_iter=250, early_stopping=True,
                        epsilon=1e-04)
    clf2 = MLPClassifier(alpha=0.2, activation='logistic',
                         hidden_layer_sizes=(11, 11), random_state=0,
                         solver='lbfgs', max_iter=250, early_stopping=True,
                         epsilon=1e-04)
    # x437 tuning history (best found: alpha=0.6, hidden sizes (9, 13))
    clf_x437 = MLPClassifier(alpha=0.6, activation='logistic',
                             hidden_layer_sizes=(9, 13), random_state=0,
                             solver='lbfgs', max_iter=250,
                             early_stopping=True, epsilon=1e-04)
    clf_sharp = MLPClassifier(alpha=0.2, activation='logistic',
                              hidden_layer_sizes=(11, 11), random_state=0,
                              solver='lbfgs', max_iter=250,
                              early_stopping=True, epsilon=1e-04)
    clf.fit(main_feats, main_labels)
    clf2.fit(line_feats, line_labels)
    clf_x437.fit(x437_feats, x437_labels)
    clf_sharp.fit(sharp_feats, sharp_labels)
    scaler = preprocessing.StandardScaler().fit(raw_main)
    scaler2 = preprocessing.StandardScaler().fit(raw_line)
    scaler_x437 = preprocessing.StandardScaler().fit(raw_x437)
    scaler_sharp = preprocessing.StandardScaler().fit(raw_sharp)
    config = {
        "scaler": scaler,
        "clf": clf,
        "pca": '',
        "savepath": './data/20/',
        "stop": -1,
        "scaler_line": scaler2,
        "clf_line": clf2,
        "scaler_x437": scaler_x437,
        "clf_x437": clf_x437,
        "scaler_sharp": scaler_sharp,
        "clf_sharp": clf_sharp,
    }
    testResult(config)
def mainold():
    """Oversample the machine block, extract features, PCA to 30 dims, and
    either cross-validate or train-and-dump an lbfgs MLP.

    BUGFIX: the original appended the 2200 oversampled traces directly onto
    ds.train["mouses"], permanently mutating the shared dataset for every
    later caller; we now extend a local copy instead.
    """
    data = dataset.DataSet()
    data.getTrainData()
    traces = list(data.train["mouses"])  # local copy — do not mutate the dataset
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = []
    feat_labels = []
    labels_tmp = labels.tolist()
    goals_tmp = targets.tolist()
    # oversample the machine range [2600, 3000) with 2200 random draws
    for _ in range(2200):
        idx = np.random.randint(2600, 3000)
        traces.append(traces[idx])
        labels_tmp.append(labels[idx])
        goals_tmp.append(targets[idx])
    labels = np.array(labels_tmp)
    targets = np.array(goals_tmp)
    for i in range(len(labels)):
        row = getfeature(0, traces[i], targets[i], 0, is_train=False)
        if type(row) is bool:  # extractor rejected this trace
            continue
        feats.append(row[0])
        feat_labels.append(labels[i])
    feats = np.array(feats)
    labels = np.array(feat_labels)
    raw_feats = feats  # unscaled copy for the test-time scaler
    feats = preprocessing.scale(feats)
    pca = PCA(n_components=30)
    pca.fit(feats)
    feats = pca.transform(feats)
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=0, activation='logistic',
                        hidden_layer_sizes=(30, 30), random_state=0,
                        solver='lbfgs', max_iter=250, early_stopping=True,
                        epsilon=1e-04)
    test = False  # flip to True to cross-validate instead of writing results
    if test:
        np.set_printoptions(formatter={'float': lambda x: "%d" % float(x)})
        trainer.trainTest(clf, feats, labels, 4.0)
    else:
        clf.fit(feats, labels)
        scaler = preprocessing.StandardScaler().fit(raw_feats)
        testResultAll(clf, scaler, pca, savepath='./data/0712tmp.txt',
                      stop=12000)
def main():
    """Train an MLP on use_all features of rows 2650..2699 only, then run
    the full test pass with an (unfitted) PCA placeholder."""
    data = dataset.DataSet()
    data.getTrainData()
    traces = data.train["mouses"]
    targets = data.train["goals"]
    labels = data.train["labels"]
    count = data.train["size"]
    feats = []
    kept_labels = []
    for i in range(count):
        # only the [2650, 2700) slice of the machine block is used
        if i in range(2650, 2700):
            row = getfeature(0, traces[i], targets[i], 0, use_all=True)
        else:
            continue
        if type(row) is bool:  # extractor rejected this trace
            continue
        feats.append(row[0])
        kept_labels.append(labels[i])
    labels = np.array(kept_labels)
    feats = np.array(feats)
    raw_feats = feats  # unscaled copy for the test-time scaler
    pca = PCA(n_components=9)  # constructed but never fitted (as in original)
    trainer = datadeal.DataTrain()
    clf = MLPClassifier(alpha=0, activation='logistic',
                        hidden_layer_sizes=(12, 15), random_state=0,
                        solver='lbfgs', max_iter=650, early_stopping=True,
                        epsilon=1e-04)
    test = False  # flip to True to cross-validate instead of writing results
    if test:
        np.set_printoptions(formatter={'float': lambda x: "%d" % float(x)})
        trainer.trainTest(clf, feats, labels, 4.0)
    else:
        clf.fit(feats, labels)
        scaler = preprocessing.StandardScaler().fit(raw_feats)
        testResultAll(clf, scaler, pca, savepath='./data/0712tmp.txt', stop=-1)