def get_trained_ann(dataset, ann=None, test_train_prop=0.25, max_epochs=50):
    """Train a classifier network on `dataset` and report its error rates.

    Splits `dataset` into test/train portions, one-hot encodes the class
    targets, builds a network via build_ann() unless one is supplied, then
    trains with backprop until convergence (capped at `max_epochs`).

    Returns a tuple (network, train_error_pct, test_error_pct).
    """
    test_set, train_set = dataset.splitWithProportion(test_train_prop)
    train_set._convertToOneOfMany()
    test_set._convertToOneOfMany()
    if not ann:
        ann = build_ann(train_set.indim, train_set.outdim)
        # ann = build_exp_ann(train_set.indim, train_set.outdim)
    # trainer = RPropMinusTrainer(ann)
    trainer = BackpropTrainer(ann, dataset=train_set, learningrate=0.01,
                              momentum=0.5, verbose=True)
    train_err = test_err = 0
    # for i in range(10):
    trainer.trainUntilConvergence(maxEpochs=max_epochs, verbose=True)
    train_err = percentError(trainer.testOnClassData(), train_set['class'])
    test_err = percentError(trainer.testOnClassData(dataset=test_set),
                            test_set['class'])
    # print train_err, test_err
    return ann, train_err, test_err
def get_trained_ann(dataset, ann=None, test_train_prop=0.25, max_epochs=50):
    """Split `dataset`, train an ANN classifier on it, and measure error.

    When `ann` is falsy a fresh network is created with build_ann(); the
    trained network plus train/test percent errors are returned as a tuple.

    NOTE(review): this shadows an identical earlier definition of
    get_trained_ann in this file — consider removing one of the two.
    """
    tstdata, trndata = dataset.splitWithProportion(test_train_prop)
    # One-hot encode class targets for both partitions.
    for partition in (trndata, tstdata):
        partition._convertToOneOfMany()
    if not ann:
        ann = build_ann(trndata.indim, trndata.outdim)
        # ann = build_exp_ann(trndata.indim, trndata.outdim)
    # trainer = RPropMinusTrainer(ann)
    trainer = BackpropTrainer(
        ann,
        dataset=trndata,
        learningrate=0.01,
        momentum=0.5,
        verbose=True,
    )
    trnresult = tstresult = 0
    # for i in range(10):
    trainer.trainUntilConvergence(maxEpochs=max_epochs, verbose=True)
    trnresult = percentError(trainer.testOnClassData(), trndata['class'])
    tstresult = percentError(
        trainer.testOnClassData(dataset=tstdata), tstdata['class'])
    # print trnresult, tstresult
    return ann, trnresult, tstresult
# Flatten each image: (n_samples, h, w) -> (n_samples, h * w).
X = datasets.reshape((datasets.shape[0], datasets.shape[1] * datasets.shape[2]))
X_train, X_test, y_train, y_test = train_test_split(X, y, train_size=0.9)

# Load the split arrays into PyBrain supervised datasets.
training = SupervisedDataSet(X.shape[1], y.shape[1])
for sample, target in zip(X_train, y_train):
    training.addSample(sample, target)
testing = SupervisedDataSet(X.shape[1], y.shape[1])
for sample, target in zip(X_test, y_test):
    testing.addSample(sample, target)

# Build a three-layer network (input, 150 hidden units, output).
net = buildNetwork(X.shape[1], 150, y.shape[1], bias=True)
# Train it with the backpropagation algorithm.
trainer = BackpropTrainer(net, training, weightdecay=0.01)
# Number of training epochs.
trainer.trainEpochs(epochs=50)

# Save the model.
# model_filename = open('CAPTCHA_predictor.model','wb')
# pickle.dump(trainer,model_filename,0)
# model_filename.close()

# Evaluate: predicted class indices on the held-out set vs. true classes.
predictions = trainer.testOnClassData(dataset=testing)
from sklearn.metrics import f1_score, classification_report
print(classification_report(y_test.argmax(axis=1), predictions))
# `fnn`, `traindata`, and `testdata` are defined earlier in the file.
trainer = BackpropTrainer(fnn, dataset=traindata, momentum=0.1, verbose=True,
                          weightdecay=0.01)

# Build a 2-D grid of points covering the data range so the network's
# decision regions can be visualised after each training round.
ticks = arange(-3., 6., 0.2)
X, Y = meshgrid(ticks, ticks)
griddata = ClassificationDataSet(2, 1, nb_classes=3)
for i in range(X.size):
    # Targets are dummies ([0]); only the activations on the grid are used.
    griddata.addSample([X.ravel()[i], Y.ravel()[i]], [0])
griddata._convertToOneOfMany()

for i in range(20):
    trainer.trainEpochs(1)  # usually 5
    trainresult = percentError(trainer.testOnClassData(), traindata["class"])
    # BUG FIX: the test error was previously computed from predictions on the
    # TRAINING data (testOnClassData() with no dataset argument) but compared
    # against the TEST labels; evaluate on the test set instead.
    testresult = percentError(trainer.testOnClassData(dataset=testdata),
                              testdata["class"])
    print("epoch %4d" % trainer.totalepochs,
          "trainerror %5.2f%%" % trainresult,
          "testerror %5.2f%%" % testresult)
    # Classify every grid point and reshape to the grid for contour plotting.
    out = fnn.activateOnDataset(griddata)
    out = out.argmax(axis=1)
    out = out.reshape(X.shape)
    figure(1)  # might be the wrong import for the following lines
    ioff()
    clf()
    hold(True)
    for c in [0, 1, 2]:
        # NOTE(review): the body of this loop is truncated in this view;
        # only the index lookup for class `c` is visible here.
        here, _ = where(testdata["class"] == c)
# Load the iris dataset: 150 samples, 4 features, 3 classes.
irisData = datasets.load_iris()
dataFeatures = irisData.data
dataTargets = irisData.target
#plt.matshow(irisData.images[11], cmap=cm.Greys_r)
#plt.show()
#print dataTargets[11]
#print dataFeatures.shape

# Four features per sample, one target column, three classes.
dataSet = ClassificationDataSet(4, 1, nb_classes=3)
for features, target in zip(dataFeatures, dataTargets):
    dataSet.addSample(np.ravel(features), target)

# NOTE(review): splitWithProportion is called here as a free function —
# presumably a helper defined elsewhere in this file; verify, since the
# PyBrain method form would be dataSet.splitWithProportion(0.7).
trainingData, testData = splitWithProportion(dataSet, 0.7)
trainingData._convertToOneOfMany()
testData._convertToOneOfMany()

# One hidden layer of 7 units; softmax output layer for classification.
neuralNetwork = buildNetwork(trainingData.indim, 7, trainingData.outdim,
                             outclass=SoftmaxLayer)
trainer = BackpropTrainer(neuralNetwork, dataset=trainingData, momentum=0.01,
                          learningrate=0.05, verbose=True)
trainer.trainEpochs(10000)

print('Error (test dataset): ' , percentError(trainer.testOnClassData(dataset=testData), testData['class']))
print('\n\n')

# Dump the raw network activations for every sample.
counter = 0
for features in dataFeatures:
    print(counter, " output is according to the NN: ",
          neuralNetwork.activate(features))
    counter = counter + 1
alldata = ClassificationDataSet(dim, 1, nb_classes=2) (data,label,items) = BinReader.readData(ur'F:\AliRecommendHomeworkData\1212新版\train15_17.expand.samp.norm.bin') #(train,label,data) = BinReader.readData(r'C:\data\small\norm\train1217.bin') for i in range(len(data)): alldata.addSample(data[i],label[i]) tstdata, trndata = alldata.splitWithProportion(0.25) trainer = BackpropTrainer(n,trndata,momentum=0.1,verbose=True,weightdecay=0.01) print 'start' #trainer.trainEpochs(1) trainer.trainUntilConvergence(maxEpochs=2) trnresult = percentError(trainer.testOnClassData(),trndata['class']) tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class']) print "epoch: %4d" % trainer.totalepochs, \ " train error: %5.2f%%" % trnresult, \ " test error: %5.2f%%" % tstresult print 'get result' #trainer.trainUntilConvergence() #out = ClassificationDataSet(37,1) #(test,label,items) = BinReader.readData(r'C:\data\homework\1218t5w.bin') ##(test,label,data) = BinReader.readData(r'C:\data\small\norm\test1218.bin') #for i in range(len(test)): # temp = [0]
# NOTE(review): this chunk opens mid-script — the statement below uses `k`,
# so it presumably belongs to a loop over test_t whose header is above this
# view; confirm before restructuring.
test.addSample(test_t.getSample(k)[0], test_t.getSample(k)[1])
# Copy every (input, target) pair from the raw training split into `training`.
for k in xrange(0, training_t.getLength()):
    training.addSample(training_t.getSample(k)[0], training_t.getSample(k)[1])
# Sanity-print dataset sizes and input/output dimensions.
print(training.getLength())
print(test.getLength())
print(test.indim)
print(test.outdim)
print(training.indim)
print(training.outdim)
# Feed-forward network: one hidden layer of 64 units, softmax output.
fnn = buildNetwork(training.indim, 64, training.outdim, outclass=SoftmaxLayer)
trainer = BackpropTrainer(fnn, dataset=training, momentum=0.1, learningrate=0.01, verbose=True, weightdecay=0.01)
trainer.trainEpochs(10)
# Percent classification error on the training set, then the test set.
print(percentError(trainer.testOnClassData(), training['class']))
print(percentError(trainer.testOnClassData(dataset=test), test['class']))
# Show the first digit image, then the net's raw activations for ten samples.
# NOTE(review): `digits` and `X` are defined outside this view.
plt.imshow(digits.images[0], cmap=plt.cm.gray_r, interpolation='nearest')
plt.show()
for i in range(0, 10):
    print(fnn.activate(X[i]))