Ejemplo n.º 1
0
def train_svm():
    """Train an SVM on generated classification data and probe it.

    Builds a 2-class dataset sized to ``input_args``, fills it via
    ``generate_data``, trains an ``SVMUnit`` with ``SVMTrainer``, then
    activates the trained unit on a small hand-written probe set.

    Returns:
        Tuple of (svm unit, trainer, training dataset, test dataset).
    """
    unit = SVMUnit()

    # Two-class dataset with one input column per entry of input_args.
    training = ClassificationDataSet(len(input_args), 1, nb_classes=2)
    training = generate_data(training, hour_to_use_app=10)

    teacher = SVMTrainer(unit, training)
    teacher.train()

    # Small hand-crafted probe set (4 inputs, 1 target column).
    probe = ClassificationDataSet(4, 1)
    for features, label in (((12, 6, 10, 6), [0]),
                            ((12, 1, 7, 22), [1]),
                            ((12, 3, 20, 1), [1])):
        probe.addSample(features, label)

    unit.activateOnDataset(probe)

    return unit, teacher, training, probe
Ejemplo n.º 2
0
def bench_pybrain(X, y, T, valid):
    """Benchmark PyBrain's libsvm-backed SVM.

    Trains on (X, y), predicts each row of T, and scores the predictions
    against ``valid``.

    Returns:
        Tuple of (mean accuracy, elapsed wall-clock time as a timedelta).
    """
    #
    #       .. PyBrain ..
    #
    # Local imports: these require libsvm < 2.81.
    from pybrain.supervised.trainers.svmtrainer import SVMTrainer
    from pybrain.structure.modules.svmunit import SVMUnit
    from pybrain.datasets import SupervisedDataSet

    start = datetime.now()

    dataset = SupervisedDataSet(X.shape[1], 1)
    for features, label in zip(X, y):
        dataset.addSample(features, label)

    trainer = SVMTrainer(SVMUnit(), dataset)
    trainer.train()

    # Predict one row at a time; the libsvm model has no batch interface here.
    predictions = np.fromiter(
        (trainer.svm.model.predict(row) for row in T),
        dtype=np.int32,
        count=T.shape[0],
    )
    accuracy = np.mean(predictions == valid)
    return accuracy, datetime.now() - start
Ejemplo n.º 3
0
from pybrain.supervised.trainers.svmtrainer import SVMTrainer

# import some local stuff
from datasets import generateClassificationData, plotData, generateGridData

# Send log records both to ./testrun.log and to the console.
logging.basicConfig(level=logging.INFO,
                    filename=join('.', 'testrun.log'),
                    format='%(asctime)s %(levelname)s %(message)s')
logging.getLogger('').addHandler(logging.StreamHandler())

# load the training and test data sets
# NOTE(review): 20 training vs 100 test samples — presumably a deliberately
# tiny training set; confirm against the dataset helper.
trndata = generateClassificationData(20, nClasses=2)
tstdata = generateClassificationData(100, nClasses=2)

# initialize the SVM module and a corresponding trainer
svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters (log2 of C and gamma)
log2C = 0.  # degree of slack
log2g = 1.1  # width of RBF kernels
trainer.train(log2C=log2C, log2g=log2g)
# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )

# pass data sets through the SVM to get performance
trnresult = percentError(svm.activateOnDataset(trndata), trndata['target'])
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata['target'])
# Python 2 print statement; reports gamma/C (converted back from log2) and errors.
print "sigma: %7g,  C: %7g,  train error: %5.2f%%,  test error: %5.2f%%" % (
    2.0**log2g, 2.0**log2C, trnresult, tstresult)
Ejemplo n.º 4
0
# Build the supervised classification datasets
# (one input column per feature; the last source column is the class label).
trainDS = ClassificationDataSet(numColsTrain-1, nb_classes=2, class_labels=['Not_Cancer', 'Cancer'])
for i in range(numPatTrain):
    trainDS.appendLinked(patternTrainInput[i], patternTrain[i, 0])

validDS = ClassificationDataSet(numColsTrain-1, nb_classes=2, class_labels=['Not_Cancer', 'Cancer'])
for i in range(numPatValid):
    validDS.appendLinked(patternValidInput[i], patternValid[i, 0])

testDS = ClassificationDataSet(numColsTrain-1, nb_classes=2, class_labels=['Not_Cancer', 'Cancer'])
for i in range(numPatTest):
    testDS.appendLinked(patternTestInput[i], patternTest[i, 0])

# Create the SVM unit and its trainer.
svm = SVMUnit()
trainer = SVMTrainer(svm, trainDS)

# SVM meta-parameters (log2 of C and gamma).
myLog2C = 0.   # degree of slack
myLog2g = 1.1  # width of the RBF kernels

# Train the SVM.
trainer.train(log2g=myLog2g, log2C=myLog2C)

# Evaluate on the datasets built above.
# FIX: the original referenced undefined ``trndata``/``tstdata`` (copy-paste
# from another example); use the train/test datasets defined in this script.
trnresult = percentError(svm.activateOnDataset(trainDS), trainDS['target'])
tstresult = percentError(svm.activateOnDataset(testDS), testDS['target'])
print("sigma: %7g,  C: %7g,  train error: %5.2f%%,  test error: %5.2f%%" % (2.0**myLog2g, 2.0**myLog2C, trnresult, tstresult))
	
Ejemplo n.º 5
0
# Send log records both to ./testrun.log and to the console.
logging.basicConfig(
    level=logging.INFO, filename=join(".", "testrun.log"), format="%(asctime)s %(levelname)s %(message)s"
)
logging.getLogger("").addHandler(logging.StreamHandler())


# load the training and test data sets
trndata = generateClassificationData(20, nClasses=2)
tstdata = generateClassificationData(100, nClasses=2)

# Dump the training samples for inspection (Python 2 print statement).
for inpt, target in trndata:
    print inpt, target

# initialize the SVM module and a corresponding trainer
svm = SVMUnit()
trainer = SVMTrainer(svm, trndata)

# train the SVM with fixed meta-parameters (log2 of C and gamma)
log2C = 0.0  # degree of slack
log2g = 1.1  # width of RBF kernels
trainer.train(log2C=log2C, log2g=log2g)
# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )

# pass data sets through the SVM to get performance
trnresult = percentError(svm.activateOnDataset(trndata), trndata["target"])
tstresult = percentError(svm.activateOnDataset(tstdata), tstdata["target"])
print "sigma: %7g,  C: %7g,  train error: %5.2f%%,  test error: %5.2f%%" % (
    2.0 ** log2g,
    2.0 ** log2C,