def getOptCG(labels, values):
    """Grid-search the optimal SVM cost (c) and gamma (g) by cross-validation.

    Trains on every (g, c) pair from exponent grids (powers of two) using
    n-fold cross-validation, and returns the pair with the highest accuracy.

    Args:
        labels: training labels, as accepted by svm_problem.
        values: training feature vectors, as accepted by svm_problem.

    Returns:
        (c, g): the cost and gamma values (2**exponent) with the best
        cross-validation accuracy.
    """
    # Set up cross-validation settings: exponent grids for c and g.
    param = Param()
    param.cset = range(-5, 15, 2)
    param.gset = range(3, -15, -2)
    param.nfold = 10

    prob = svm_problem(labels, values)
    # Accuracy grid: rows indexed by gamma exponent, columns by cost exponent.
    rVal = [[0 for _ in range(len(param.cset))] for _ in range(len(param.gset))]

    # Cross-validation over the full (g, c) grid.
    for i in range(len(param.gset)):
        param.g = 2 ** param.gset[i]
        for j in range(len(param.cset)):
            param.c = 2 ** param.cset[j]
            # In -v mode svm_train performs n-fold cross-validation and
            # returns the accuracy rather than a model.
            rVal[i][j] = svm_train(prob, param.libsvm + " -v " + str(param.nfold))

    # Select the parameters with highest accuracy.
    # NOTE(review): getMax is assumed to return (max_value, (row, col)) —
    # the original bound the value to the misleading name `min_val`.
    best_acc, loc = getMax(rVal)
    g = 2 ** param.gset[loc[0]]
    c = 2 ** param.cset[loc[1]]
    return c, g
def svmtrain(labels, values=None, c=None, g=None):
    """Train an SVM model, grid-searching c and g when not supplied.

    Args:
        labels: either a list of labels (with `values` given separately), or a
            dict mapping each class to a dict of that class's feature vectors,
            in which case labels/values are derived from it.
        values: feature vectors; required unless `labels` is a dict.
        c: SVM cost parameter; found via getOptCG when None.
        g: SVM gamma parameter; found via getOptCG when None.

    Returns:
        The trained libsvm model.

    Raises:
        TypeError: if no feature values are available.
    """
    if isinstance(labels, dict):
        # labels maps class -> {sample: feature_vector}. Flatten into parallel
        # lists; .values() and enumerate iterate in one consistent order, so
        # features and their 1-based class labels stay aligned.
        class_groups = list(labels.values())
        values = [feat for group in class_groups for feat in group.values()]
        labels = [idx + 1 for idx, group in enumerate(class_groups)
                  for _ in range(len(group))]

    # Guard clause: without feature vectors there is nothing to train on.
    if values is None:
        raise TypeError("Values not provided for the arguments")

    optParam = Param()
    optParam.c = c
    optParam.g = g
    if c is None or g is None:
        # Retrieve optimal c and g via cross-validated grid search.
        optParam.c, optParam.g = getOptCG(labels, values)

    # Train the final model with the chosen parameters.
    prob = svm_problem(labels, values)
    m = svm_train(prob, optParam.libsvm)
    return m