import time

from pybrain.datasets import ClassificationDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure.modules import SigmoidLayer
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.utilities import percentError


def init_brain(learn_data, epochs, hidden_count, TrainerClass=BackpropTrainer):
    global data_dir
    print("\t Epochs: ", epochs)
    if learn_data is None:
        return None
    print("Building network")
    net = buildNetwork(7 * 7, hidden_count, 4, hiddenclass=SigmoidLayer)
    # net = buildNetwork(64 * 64, 32 * 32, 8 * 8, 5)
    # net = buildNetwork(64 * 64, 5, hiddenclass=LinearLayer)
    # fill dataset with learn data
    trans = {'0': 0, '1': 1, '2': 2, '3': 3}
    ds = ClassificationDataSet(7 * 7, nb_classes=4,
                               class_labels=['0', '1', '2', '3'])
    for inp, out in learn_data:
        ds.appendLinked(inp, [trans[out]])
    ds.calculateStatistics()
    print("\tNumber of classes in dataset = {0}".format(ds.nClasses))
    print("\tOutput in dataset is ", ds.getField('target').transpose())
    ds._convertToOneOfMany(bounds=[0, 1])
    print("\tBut after convert output in dataset is \n", ds.getField('target'))
    trainer = TrainerClass(net, learningrate=0.1, verbose=True)
    trainer.setData(ds)
    print("\tEverything is ready for learning.\nPlease wait, training in progress...")
    start = time.time()
    trainer.trainEpochs(epochs=epochs)
    end = time.time()
    f = open(data_dir + "/values.txt", "w")
    f.write("Training time: %.2f \n" % (end - start))
    f.write("Total epochs: %s \n" % (trainer.totalepochs))
    # f.write("Error: %.22f" % (trainer.trainingErrors[len(trainer.trainingErrors) - 1]))
    f.close()
    print("Percent of error: ",
          percentError(trainer.testOnClassData(), ds['class']))
    print("\tOk. We have trained our network.")
    NetworkWriter.writeToFile(net, data_dir + "/net.xml")
    return net
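# A minimal follow-up sketch (not in the original): the net.xml written by
# NetworkWriter above can be restored later with its counterpart,
# NetworkReader. "data_dir" is assumed to be the same directory used inside
# init_brain.
from pybrain.tools.customxml.networkreader import NetworkReader

net = NetworkReader.readFrom(data_dir + "/net.xml")
print(net.activate([0] * (7 * 7)))  # 49 dummy inputs, matching the 7x7 input layer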
import pandas as pd
import joblib  # older scikit-learn: from sklearn.externals import joblib
from pybrain.datasets import ClassificationDataSet

har_data = pd.read_csv('./HAR3.csv')
har_test_data = har_data.iloc[100000:110000, 0:17]
har_test_label = har_data.iloc[100000:110000, 17:18]

# error = 0.0120909090909
clf1 = joblib.load('./clfGB.pkl')
predict_test = clf1.predict(har_test_data)
test_accuracy_gb = clf1.score(har_test_data, har_test_label)
test_error_gb = 1 - test_accuracy_gb

# nn error = 0.245666666667
fnn = joblib.load('./nn.pkl')
alldata = ClassificationDataSet(17, nb_classes=5)
for i in range(len(har_test_data)):
    t = int(har_test_label.iloc[i]) - 1  # labels are 1-based in the CSV
    alldata.addSample(har_test_data.iloc[i], [t])
alldata._convertToOneOfMany(bounds=[0, 1])

out = fnn.activateOnDataset(alldata)
out = out.argmax(axis=1)
out2 = alldata.getField('target').argmax(axis=1)
length = len(out)
count = 0
for i in range(len(out)):
    if out[i] != out2[i]:
        count += 1
test_error_nn = float(count) / float(length)
test_accuracy_nn = 1 - test_error_nn
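# Hedged alternative (not in the original): pybrain.utilities.percentError
# can replace the manual mismatch loop above. After _convertToOneOfMany the
# original integer labels are kept in alldata['class'].
from pybrain.utilities import percentError

test_error_nn_pct = percentError(out, alldata['class'])  # percentage, 0-100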
from math import sqrt
from sklearn import preprocessing
from pybrain.utilities import percentError

for i in range(epochs):
    mse = trainer.train()
    rmse = sqrt(mse)
    print("training RMSE, epoch {}: {}".format(i + 1, rmse))

# trainer.trainUntilConvergence(verbose=True, validationProportion=0.15,
#                               maxEpochs=1000, continueEpochs=10)

lb = preprocessing.LabelBinarizer()
lb.fit(YtestPos)
list(lb.classes_)
YtestPos = lb.transform(YtestPos)

ds.setField('input', XtestPos)
ds.setField('target', YtestPos)
x = ds.getField('input')
y = ds.getField('target')

trnresult = percentError(trainer.testOnClassData(), trndata['class'])
# testOnClassData expects a dataset, not a raw field array, so pass ds here
tstresult = percentError(trainer.testOnClassData(dataset=ds), YtestPos.T)
print("epoch: %4d" % trainer.totalepochs,
      " train error: %5.2f%%" % trnresult,
      " test error: %5.2f%%" % tstresult)

# from pybrain.datasets import ClassificationDataSet
# from pybrain.utilities import percentError
# from pybrain.tools.shortcuts import buildNetwork
# from pybrain.supervised.trainers import BackpropTrainer
# from pybrain.structure.modules import SoftmaxLayer
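# Hedged sketch (not from the original): rather than overwriting fields on
# "ds", the test split can live in its own ClassificationDataSet, which is
# what testOnClassData(dataset=...) expects. Shapes are assumed from the
# surrounding code (rows of XtestPos are inputs, YtestPos is one-hot).
from pybrain.datasets import ClassificationDataSet

tstdata = ClassificationDataSet(XtestPos.shape[1], nb_classes=YtestPos.shape[1])
for xi, yi in zip(XtestPos, YtestPos.argmax(axis=1)):
    tstdata.addSample(xi, [yi])
tstdata._convertToOneOfMany(bounds=[0, 1])
tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])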
from skimage.io import imread
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure.modules import TanhLayer, SoftmaxLayer
from pybrain.supervised.trainers import BackpropTrainer

# category += 1  (end of the preceding training-set loop, which the excerpt omits)
category = 0
for shape in shapes:
    for i in range(8):
        image = imread('C:/Users/alexis.matelin/Documents/Neural Networks/Visual classification/shapes/testing/' + shape + str(i + 1) + '.png',
                       as_grey=True, plugin=None, flatten=None)
        image_vector = image.flatten()
        ds_testing.appendLinked(image_vector, [category])
    category += 1  # advance the class label per shape (missing in the excerpt, but needed for 3 classes)

ds_training.calculateStatistics()
ds_training.getClass(0)
print(ds_training.getField('target'))
ds_training._convertToOneOfMany(bounds=[0, 1])
ds_testing._convertToOneOfMany(bounds=[0, 1])
print(ds_training.getField('target'))

net = buildNetwork(1024, 12, 12, 3, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
trainer = BackpropTrainer(net, dataset=ds_training, verbose=True, learningrate=0.01)
trainer.trainUntilConvergence()

out = net.activateOnDataset(ds_testing)
out = out.argmax(axis=1)
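# Hedged follow-up (not from the original): compare the argmax predictions
# above against the true classes kept in ds_testing['class'] after
# _convertToOneOfMany, using pybrain.utilities.percentError.
from pybrain.utilities import percentError

test_error = percentError(out, ds_testing['class'])
print("test error: %5.2f%%" % test_error)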
# Tail of createNPRow(row); the excerpt picks this function up mid-body.
            ratio = float(float(num) / float(len(possibilities[names[i]])))
            nr.append(ratio)
            print(ratio, column)
        else:
            print(column, "not an int or long", possibilities[names[i]], names[i])
    return np.array(nr[:-1]), nr[-1]

dataset = ClassificationDataSet(46, 1, class_labels=possibilities['readmitted'])
for row in cursor.execute("select %s from diabetic_data limit 0, 10000" % columns):
    xd, yd = createNPRow(row)
    dataset.addSample(xd, yd)

nn = buildNetwork(dataset.indim, 20, dataset.outdim, outclass=SoftmaxLayer)
trainer = BackpropTrainer(nn, dataset=dataset, momentum=0.1, verbose=True,
                          weightdecay=0.01)
print(possibilities['readmitted'])
print(dataset.getField('target'))

for x in range(10):
    error = trainer.train()
    print(error)

errors, success = 0, 0
for row in cursor.execute("select %s from diabetic_data limit 50000, 101766" % columns):
    xd, yd = createNPRow(row)
    check = int(round(nn.activate(xd[:46])[0]))
    if check > 1:
        check = 1
    prediction = possibilities['readmitted'][check]
    actual = possibilities['readmitted'][yd]
    if prediction == actual:
        match = "match"
        success += 1
    else:
        errors += 1  # the excerpt ends at this branch; counting misses is the evident intent
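# Hedged sketch (not from the original): with a single output unit, softmax
# always emits 1.0, so rounding nn.activate(...)[0] cannot separate classes.
# One-of-many targets with an argmax readout is the usual PyBrain pattern;
# the class count is taken from the data via calculateStatistics.
dataset.calculateStatistics()
dataset._convertToOneOfMany(bounds=[0, 1])
nn = buildNetwork(dataset.indim, 20, dataset.outdim, outclass=SoftmaxLayer)
trainer = BackpropTrainer(nn, dataset=dataset, momentum=0.1, verbose=True,
                          weightdecay=0.01)
check = nn.activate(xd[:46]).argmax()  # predicted class index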
# supervised learning tutorial
from pybrain.datasets import SupervisedDataSet
from pybrain.datasets import ClassificationDataSet

# DS = SupervisedDataSet(3, 2)
# DS.appendLinked([1, 2, 3], [4, 5])
# print(len(DS))
# DS['input']  # array([[1., 2., 3.]])

DS = ClassificationDataSet(2, class_labels=['Urd', 'Verdandi', 'Skuld'])
DS.appendLinked([0.1, 0.5], [0])
DS.appendLinked([1.2, 1.2], [1])
DS.appendLinked([1.4, 1.6], [1])
DS.appendLinked([1.6, 1.8], [1])
DS.appendLinked([0.10, 0.80], [2])
DS.appendLinked([0.20, 0.90], [2])

print(DS.calculateStatistics())
print(DS.classHist)
print(DS.nClasses)
print(DS.getClass(1))
print(DS.getField('target').transpose())
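# Hedged follow-up (not from the original): splitWithProportion gives a
# train/test split of the samples above. Note that in later PyBrain releases
# it returns plain SupervisedDataSets, so the halves may need rewrapping
# before ClassificationDataSet-only methods such as _convertToOneOfMany.
tstdata, trndata = DS.splitWithProportion(0.25)
print(len(trndata), len(tstdata))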
# Method of ClassificationDataSet (excerpt from PyBrain's own
# pybrain/datasets/classification.py), followed by the module's self-test.
def castToRegression(self, values):
    """Converts data set into a SupervisedDataSet for regression. Classes
    are used as indices into the value array given."""
    regDs = SupervisedDataSet(self.indim, 1)
    fields = self.getFieldNames()
    fields.remove('target')
    for f in fields:
        regDs.setField(f, self[f])
    regDs.setField('target', values[self['class'].astype(int)])
    return regDs


if __name__ == "__main__":
    dataset = ClassificationDataSet(2, 1, class_labels=['Urd', 'Verdandi', 'Skuld'])
    dataset.appendLinked([0.1, 0.5], [0])
    dataset.appendLinked([1.2, 1.2], [1])
    dataset.appendLinked([1.4, 1.6], [1])
    dataset.appendLinked([1.6, 1.8], [1])
    dataset.appendLinked([0.10, 0.80], [2])
    dataset.appendLinked([0.20, 0.90], [2])
    dataset.calculateStatistics()
    print("class histogram:", dataset.classHist)
    print("# of classes:", dataset.nClasses)
    print("class 1 is:", dataset.getClass(1))
    print("targets:", dataset.getField('target'))
    dataset._convertToOneOfMany(bounds=[0, 1])
    print("converted targets:")
    print(dataset.getField('target'))
    dataset._convertToClassNb()
    print("reconverted to original:", dataset.getField('target'))
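# Hedged usage sketch (not in the original): castToRegression maps each class
# index to a numeric value, turning the classification set into a regression
# task. The value array below is an illustrative choice, not from the source;
# the 'class' field it indexes into is populated by _convertToOneOfMany above.
import numpy as np

values = np.array([0.0, 0.5, 1.0])  # one regression target per class
regDs = dataset.castToRegression(values)
print(regDs.getField('target'))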