def train_svm():
    # build the SVM unit and a two-class dataset
    # (input_args and generate_data come from the surrounding module)
    svm = SVMUnit()
    ds = ClassificationDataSet(len(input_args), 1, nb_classes=2)
    ds = generate_data(ds, hour_to_use_app=10)

    # train the SVM on the generated data
    trainer = SVMTrainer(svm, ds)
    trainer.train()

    # build a small hand-made test set and run it through the trained SVM
    test = ClassificationDataSet(4, 1)
    test.addSample((12, 6, 10, 6), [0])
    test.addSample((12, 1, 7, 22), [1])
    test.addSample((12, 3, 20, 1), [1])
    svm.activateOnDataset(test)

    return svm, trainer, ds, test
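# A minimal, hypothetical usage sketch of the helper above; it assumes
# input_args and generate_data really are provided by the surrounding module.
svm, trainer, ds, test = train_svm()
print svm.activateOnDataset(test)  # class predictions for the three test samples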
def bench_pybrain(X, y, T, valid):
    #
    #       .. PyBrain ..
    #
    # local imports; these require libsvm < 2.81
    from pybrain.supervised.trainers.svmtrainer import SVMTrainer
    from pybrain.structure.modules.svmunit import SVMUnit
    from pybrain.datasets import SupervisedDataSet

    tstart = datetime.now()

    # wrap the training arrays in a PyBrain dataset
    ds = SupervisedDataSet(X.shape[1], 1)
    for i in range(X.shape[0]):
        ds.addSample(X[i], y[i])

    # fit the SVM
    clf = SVMTrainer(SVMUnit(), ds)
    clf.train()

    # predict each test sample with the underlying libsvm model and score it
    pred = np.empty(T.shape[0], dtype=np.int32)
    for i in range(T.shape[0]):
        pred[i] = clf.svm.model.predict(T[i])
    score = np.mean(pred == valid)

    return score, datetime.now() - tstart
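# A minimal sketch of how bench_pybrain might be driven; the synthetic data,
# split sizes, and labelling rule below are illustrative assumptions, not part
# of the original benchmark harness.
import numpy as np
from datetime import datetime  # bench_pybrain expects these module-level imports

rng = np.random.RandomState(0)
X = rng.randn(200, 5)                          # training inputs
y = (X[:, 0] + X[:, 1] > 0).astype(int)        # binary training labels
T = rng.randn(50, 5)                           # test inputs
valid = (T[:, 0] + T[:, 1] > 0).astype(int)    # ground-truth test labels

score, elapsed = bench_pybrain(X, y, T, valid)
print("accuracy: %.3f, wall time: %s" % (score, elapsed))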
    filename=join('.', 'testrun.log'),
    format='%(asctime)s %(levelname)s %(message)s')
logging.getLogger('').addHandler(logging.StreamHandler())

# load the training and test data sets
trndata = generateClassificationData(20, nClasses=2)
tstdata = generateClassificationData(100, nClasses=2)

# initialize the SVM module and a corresponding trainer
svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters
log2C = 0.   # degree of slack
log2g = 1.1  # width of RBF kernels
trainer.train(log2C=log2C, log2g=log2g)

# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )

# pass data sets through the SVM to get performance
trnresult = percentError(svm.activateOnDataset(trndata), trndata['target'])
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata['target'])
print "sigma: %7g, C: %7g, train error: %5.2f%%, test error: %5.2f%%" % (
    2.0**log2g, 2.0**log2C, trnresult, tstresult)

# generate a grid dataset
griddat, X, Y = generateGridData(x=[-4, 8, 0.1], y=[-2, 3, 0.1])

# pass the grid through the SVM, but this time get the raw distance
# from the boundary, not the class
Z = svm.activateOnDataset(griddat, values=True)
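# A hypothetical way to visualize the raw output over that grid. The exact
# structure Z comes back in from activateOnDataset(..., values=True) is not
# shown in the snippet (libsvm can report per-class-pair decision values),
# so the flattening and reshape below are assumptions.
import numpy as np
import matplotlib.pyplot as plt

# assumed: one scalar decision value per grid point, reshaped to the meshgrid
Zgrid = np.array([list(z.values())[0] if isinstance(z, dict) else z for z in Z])
Zgrid = Zgrid.reshape(X.shape)

plt.contourf(X, Y, Zgrid, 30)                     # distance-to-boundary surface
plt.contour(X, Y, Zgrid, levels=[0], colors='k')  # the decision boundary itself
plt.scatter(trndata['input'][:, 0], trndata['input'][:, 1],
            c=trndata['target'].ravel(), edgecolors='w')
plt.colorbar()
plt.show()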
# XOR truth table as a supervised dataset
dataset = SupervisedDataSet(2, 1)
dataset.addSample((0, 0), (0,))
dataset.addSample((0, 1), (1,))
dataset.addSample((1, 0), (1,))
dataset.addSample((1, 1), (0,))

# Creating the SVM unit
svm = SVMUnit()

# Training the SVM; unlike BackpropTrainer, SVMTrainer fits the libsvm
# model in a single call, so no error-driven loop is needed
trainer = SVMTrainer(svm, dataset)
trainer.train()

# Printing the SVM's activation for the whole dataset
print svm.activateOnDataset(dataset)

# Printing the network weights -- this only applies to the FeedForwardNetwork
# version of this example; an SVMUnit has no modules or connections
# for mod in network.modules:
#     for conn in network.connections[mod]:
#         print conn
#         for cc in range(len(conn.params)):
#             print conn.whichBuffers(cc), conn.params[cc]

# Activating individual inputs
# print svm.activate([0, 0])
# print svm.activate([0, 1])
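# Assumed follow-up: walk the XOR patterns one by one and compare the SVM's
# output to the stored target (names match the snippet above).
for inp, tgt in dataset:
    out = svm.activate(inp)
    print inp, '->', out, '(expected:', tgt, ')'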
# Create the supervised datasets
trainDS = ClassificationDataSet(numColsTrain - 1, nb_classes=2,
                                class_labels=['Not_Cancer', 'Cancer'])
for i in range(numPatTrain):
    trainDS.appendLinked(patternTrainInput[i], patternTrain[i, 0])

validDS = ClassificationDataSet(numColsTrain - 1, nb_classes=2,
                                class_labels=['Not_Cancer', 'Cancer'])
for i in range(numPatValid):
    validDS.appendLinked(patternValidInput[i], patternValid[i, 0])

testDS = ClassificationDataSet(numColsTrain - 1, nb_classes=2,
                               class_labels=['Not_Cancer', 'Cancer'])
for i in range(numPatTest):
    testDS.appendLinked(patternTestInput[i], patternTest[i, 0])

# Create the SVM and the trainer
svm = SVMUnit()
trainer = SVMTrainer(svm, trainDS)

# SVM parameters
myLog2C = 0.
myLog2g = 1.1

# Train the SVM
trainer.train(log2g=myLog2g, log2C=myLog2C)

# Measure train and test error
trnresult = percentError(svm.activateOnDataset(trainDS), trainDS['target'])
tstresult = percentError(svm.activateOnDataset(testDS), testDS['target'])
print("sigma: %7g, C: %7g, train error: %5.2f%%, test error: %5.2f%%"
      % (2.0**myLog2g, 2.0**myLog2C, trnresult, tstresult))
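# The validation split built above is otherwise unused; an assumed extension
# folding it into the same error report.
validresult = percentError(svm.activateOnDataset(validDS), validDS['target'])
print("validation error: %5.2f%%" % validresult)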
# load the training and test data sets
trndata = generateClassificationData(20, nClasses=2)
tstdata = generateClassificationData(100, nClasses=2)
for inpt, target in trndata:
    print inpt, target

# initialize the SVM module and a corresponding trainer
svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters
log2C = 0.0  # degree of slack
log2g = 1.1  # width of RBF kernels
trainer.train(log2C=log2C, log2g=log2g)

# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )

# pass data sets through the SVM to get performance
trnresult = percentError(svm.activateOnDataset(trndata), trndata["target"])
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata["target"])
print "sigma: %7g, C: %7g, train error: %5.2f%%, test error: %5.2f%%" % (
    2.0 ** log2g,
    2.0 ** log2C,
    trnresult,
    tstresult,
)

# generate a grid dataset
griddat, X, Y = generateGridData(x=[-4, 8, 0.1], y=[-2, 3, 0.1])
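# This variant stops after building the grid; the fuller example earlier in
# this section follows up by pushing the grid through the SVM for raw
# distances from the boundary rather than class labels, and the same call
# applies here.
Z = svm.activateOnDataset(griddat, values=True)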