Example No. 1
def TrainSvmLinear(Y, X, sweep_c=range(-2,8)):
    num_positives = float(Y.count(1))
    num_negatives = float(Y.count(-1))

    best_c = -1
    best_acc = -1
    for c_pow in sweep_c:
        current_c = np.power(2.0,c_pow)
        prob = svm.svm_problem(Y,X)
        param = svm.svm_parameter('-v 5 -t 0 -c %f -w-1 %f -w1 %f -q' % (current_c,
                                                                         100/num_negatives,
                                                                         100/num_positives))
        current_acc = svm.svm_train(prob, param)
        print '%f, %f' % (current_c, current_acc)
        if best_acc < current_acc:
            best_acc = current_acc
            best_c = current_c

        # recompute accuracy
        param = svm.svm_parameter('-t 0 -c %f -w-1 %f -w1 %f -q' % (best_c,
                                                                    100/num_negatives,
                                                                    100/num_positives))
        svm_model = svm.svm_train(prob, param)
        p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')


    prob = svm.svm_problem(Y,X)
    param = svm.svm_parameter('-t 0 -c %f -w-1 %f -w1 %f -q' % (best_c,
                                                                100/num_negatives,
                                                                100/num_positives))
    svm_model = svm.svm_train(prob, param)
    p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')
    pdb.set_trace()
    return svm_model
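
The -w-1 and -w1 options above rescale the per-class penalty C so the minority class is not swamped by the majority class (libsvm's '-wi weight' sets the C of class i to weight * C). A quick standalone check of that arithmetic, with made-up class counts that are not taken from any example here:

# Illustration only: hypothetical class counts.
num_negatives, num_positives = 400.0, 100.0

# The rarer class ends up with the larger effective penalty.
w_neg = 100 / num_negatives   # 0.25
w_pos = 100 / num_positives   # 1.0
print('-w-1 %f -w1 %f' % (w_neg, w_pos))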
Example No. 2
def TrainSvmRbf2(Y, X, sweep_c=range(-5,5), sweep_g=range(-5,5)):
    num_negatives = float(Y.count(-1))
    num_positives = float(Y.count(1))

    best_c = -1
    best_g = -1
    best_acc = -1
    for c_pow in sweep_c:
        for g_pow in sweep_g:
            current_c = np.power(2.0,c_pow)
            current_g = np.power(2.0,g_pow)
            prob = svm.svm_problem(Y,X)
            param = svm.svm_parameter('-t 2 -c %f -g %f -w-1 %f -w1 %f -q' % (current_c,
                                                                              current_g,
                                                                              100/num_negatives,
                                                                              100/num_positives))
            current_pos_acc, current_neg_acc = CrossValidate(Y, X, param)
            current_acc = current_pos_acc
            print 'c = %f, g = %f, cv acc = %f, neg acc = %f' % (current_c, current_g, current_acc,
                                                                 current_neg_acc)
            if best_acc < current_acc:
                best_acc = current_acc
                best_c = current_c
                best_g = current_g

    prob = svm.svm_problem(Y,X)
    param = svm.svm_parameter('-t 2 -c %f -g %f -w-1 %f -w1 %f -q' % (best_c, best_g,
                                                                      100/num_negatives,
                                                                      100/num_positives))
    svm_model = svm.svm_train(prob, param)
    p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')
    pdb.set_trace()
    return svm_model
Example No. 3
    def test_multi_class_without_probability(self):
        # Generate some random data.
        # This unit test should not rely on scikit learn for test data.
        x, y = [], []
        for _ in range(50):
            x.append([
                random.gauss(200, 30),
                random.gauss(-100, 22),
                random.gauss(100, 42)
            ])
            y.append(random.choice([1, 2, 10, 12]))
        y[0], y[1], y[2], y[3] = 1, 2, 10, 12
        column_names = ['x1', 'x2', 'x3']
        prob = svmutil.svm_problem(y, x)

        df = pd.DataFrame(x, columns=column_names)

        for param1 in self.non_kernel_parameters:
            for param2 in self.kernel_parameters:
                param_str = ' '.join([self.base_param, param1, param2])
                param = svm_parameter(param_str)

                model = svm_train(prob, param)

                # Get predictions with probabilities as dictionaries
                (df['prediction'], _, _) = svm_predict(y, x, model, ' -q')

                spec = libsvm.convert(model, column_names, 'target')

                metrics = evaluate_classifier(spec, df, verbose=False)
                self.assertEquals(metrics['num_errors'], 0)
Example No. 4
def calculate_race():
    correct = 0
    answers = []
    input = []
    count = 0
    for d in data:
        answers.append(question2b_race_truth.truth[count])
        input.append(d)
        if count == 49:
            break
        count += 1

    prob = svmutil.svm_problem(answers, input)
    param = svmutil.svm_parameter('-t 2 -c 4')
    param.cross_validation = 1
    param.nr_fold = 10
    cv = svmutil.svm_train(prob, param)

    param = svmutil.svm_parameter('-t 2 -c 4')
    m = svmutil.svm_train(prob, param)
    count = 0
    for d in data:
        if count < 50:
            count += 1
            continue
        else:
            x0, max_idx = gen_svm_nodearray(d)
            p = libsvm.svm_predict(m, x0)
            if p == question2b_race_truth.truth[count]:
                correct += 1
            count += 1
    return cv, correct / float(50) * 100
Example No. 5
def kfold(data, labels, k):
	try:
		import svmutil
	except:
		return 0
	prabs = []

	for xxx in range(0, 10):
		picks = np.random.choice(len(data), len(data) / k, replace=False)
		testLabel = labels[picks]
		testPoint = data[picks]
		trainPoint = data[np.setdiff1d(range(0, len(data)), picks)]
		trainLabel = labels[np.setdiff1d(range(0, len(data)), picks)]

		trainLabel = trainLabel.tolist()
		trainPoint = trainPoint.tolist()

		prob = svmutil.svm_problem(trainLabel, trainPoint)
		param = svmutil.svm_parameter('-t 3 -c 4 -b 1 -q')
		testLabel = testLabel.tolist()
		testPoint = testPoint.tolist()

		m = svmutil.svm_train(prob, param)
		svmutil.svm_save_model('n.model', m)

		p_label, p_acc, p_val = svmutil.svm_predict(testLabel, testPoint, m, '-b 1')

		prabs.append(p_acc[0])

	print sum(prabs) / float(len(prabs))
	print 'std' + str(np.std(prabs))
	return sum(prabs) / float(len(prabs))
Example No. 6
    def train(cls, featuresets, params="-t 0 -q"):
        """Train a classifier using the given featuresets.

        Args:
            featuresets: List of featuresets.
            params: Parameter string to pass to svmutil.svm_parameter.

        Returns:
            SvmClassifier object.
        """
        all_features = set()
        all_labels = set()
        for featuredict, label in featuresets:
            all_features.update(set(featuredict.keys()))
            all_labels.add(label)
        all_labels = sorted(all_labels)
        all_features = sorted(all_features)
        featureindex = dict(zip(all_features, range(1, len(all_features) + 1)))
        labelindex = dict(zip(all_labels, range(1, len(all_labels) + 1)))
        vectors, labels = cls.featuresets_to_svm(featureindex, labelindex,
                                                 featuresets)
        prob = svmutil.svm_problem(labels, vectors)
        param = svmutil.svm_parameter(params)
        model = svmutil.svm_train(prob, param)
        return cls(featureindex, labelindex, model)
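
The featuresets_to_svm helper used above is not shown. A minimal sketch of what it presumably does, written as a standalone function (the name, signature and behaviour are assumptions, not the original implementation); libsvm's svm_problem accepts a list of {index: value} dicts for the feature vectors:

def featuresets_to_svm(featureindex, labelindex, featuresets):
    # Hypothetical helper (not the original): encode each feature dict as a
    # sparse 1-based {index: value} vector and map labels through labelindex.
    vectors, labels = [], []
    for featuredict, label in featuresets:
        vectors.append(dict((featureindex[f], 1.0)
                            for f in featuredict if f in featureindex))
        labels.append(labelindex[label])
    return vectors, labels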
Example No. 7
def part_d():
    print("\n--- Part D ---\n")

    print("Reading Data")
    train_y, train_x = read_data("train")
    test_y, test_x = read_data("test")

    print("Normalizing")
    train_x = normalize(train_x)
    test_x = normalize(test_x)

    problem = svm_problem(train_y, train_x)
    params = "-q -s 0 -t 2 -g 0.05"

    results = []
    for c in [10**-5, 10**-3, 1, 5, 10]:

        c = " -c %f " % c
        print("10-fold CV using" + c)
        cv_acc = svm_train(problem, params + c + "-v 10")

        print("On test data using" + c)
        model = svm_train(problem, params + c)
        _, test_acc, _ = svm_predict(test_y, test_x, model)
        print("C, Accuracy: ", c, cv_acc, test_acc)

        results.append((c, cv_acc, test_acc[0]))
Example No. 8
def main(path, k):
	
	prabs = []
	lns = []
	for kk in range(0,k-1):
		testLabel = []
		trainPoint = []
		trainLabel = []
		testPoint = []
		wcCount = 0
		for u in os.listdir(path): 
			if u[-2:] == 'WC':
				wcCount += 1
				filePath = path+u
				WC = pickle.load(open(filePath, 'rb'))
				if wcCount % k == 0 + kk:
					testLabel.append(int(u[1]))
					testPoint.append(WC)
					
				else:
					trainLabel.append(int(u[1]))
					trainPoint.append(WC)

		lns.append(len(testLabel))
		prob = svmutil.svm_problem(trainLabel, trainPoint)
		param = svmutil.svm_parameter('-t 0 -c 4 -b 1 -q')


		m = svmutil.svm_train(prob, param)
		svmutil.svm_save_model('n.model', m)
		p_label, p_acc, p_val = svmutil.svm_predict(testLabel, testPoint, m, '-b 1')
		prabs.append(p_acc[0])
Example No. 9
def part_c():
    print("\n--- Part C ---\n")

    print("Reading Data")
    train_y, train_x = read_data("train")
    test_y, test_x = read_data("test")

    print("Normalizing")
    train_x = normalize(train_x).tolist()
    test_x = normalize(test_x).tolist()

    problem = svm_problem(train_y, train_x)
    params = svm_parameter("-q -s 0 -c 1")

    # Timing calculations
    print("Training SVM (linear kernel)")
    params.parse_options("-t 0")
    model = svm_train(problem, params)

    _, p_acc, _ = svm_predict(test_y, test_x, model)
    print("Accuracy: ", p_acc)

    print("Training SVM (gaussian kernel)")
    params.parse_options("-t 2 -g 0.05")
    model = svm_train(problem, params)

    _, p_acc, _ = svm_predict(test_y, test_x, model)
    print("Accuracy: ", p_acc)
Example No. 10
    def setUpClass(self):
        """
        Set up the unit test by loading the dataset and training a model.
        """
        if not HAS_LIBSVM:
            # setUpClass is still called even if class is skipped.
            return

        # Generate some random data.
        # This unit test should not rely on scikit learn for test data.
        self.x, self.y = [], []
        random.seed(42)
        for _ in range(50):
            self.x.append([random.gauss(200, 30), random.gauss(-100, 22)])
            self.y.append(random.choice([1, 2]))
        self.y[0] = 1  # Make sure 1 is always the first label it sees
        self.y[1] = 2
        self.column_names = ['x1', 'x2']
        self.prob = svmutil.svm_problem(self.y, self.x)

        param = svmutil.svm_parameter()
        param.svm_type = svmutil.C_SVC
        param.kernel_type = svmutil.LINEAR
        param.eps = 1
        param.probability = 1

        self.libsvm_model = svmutil.svm_train(self.prob, param)
Example No. 11
def TrainSvmLinear2(Y, X, sweep_c=range(-2,18)):
    num_positives = float(Y.count(1))
    num_negatives = float(Y.count(-1))

    best_c = -1
    best_acc = -1
    for c_pow in sweep_c:
        current_c = np.power(2.0,c_pow)
        param = svm.svm_parameter('-t 0 -c %f -w-1 %f -w1 %f -q' % (current_c,
                                                                    100/num_negatives,
                                                                    100/num_positives))
        current_pos_acc, current_neg_acc = CrossValidate(Y, X, param)
        current_acc = current_pos_acc
        print '%f, %f, %f' % (current_c, current_acc, current_neg_acc)
        if best_acc < current_acc:
            best_acc = current_acc
            best_c = current_c

    prob = svm.svm_problem(Y,X)
    param = svm.svm_parameter('-t 0 -c %f -w-1 %f -w1 %f -q' % (best_c,
                                                                100/num_negatives,
                                                                100/num_positives))
    svm_model = svm.svm_train(prob, param)
    p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')
    return svm_model
Example No. 12
    def test_convert_svmc_raw(self):
        iris = load_iris()

        X = iris.data[:, :2]
        y = iris.target
        y[y == 2] = 1

        prob = svmutil.svm_problem(y, X.tolist())

        param = svmutil.svm_parameter()
        param.svm_type = SVC
        param.kernel_type = svmutil.RBF
        param.eps = 1
        param.probability = 0
        if noprint:
            param.print_func = noprint

        libsvm_model = svmutil.svm_train(prob, param)

        # known svm runtime dimension error in ONNX Runtime
        node = convert(libsvm_model, "LibSvmSvmcRaw", [('input', FloatTensorType(shape=[1, 'None']))])
        self.assertTrue(node is not None)
        dump_data_and_model(X[:5].astype(numpy.float32), SkAPICl(libsvm_model), node,
                            basename="LibSvmSvmcRaw",
                            allow_failure="StrictVersion(onnxruntime.__version__) < StrictVersion('0.5.0')")
Example No. 13
    def test_convert_svmc_linear_raw_multi(self):
        iris = load_iris()

        X = iris.data[:, :2]
        y = iris.target
        y[-5:] = 3

        prob = svmutil.svm_problem(y, X.tolist())

        param = svmutil.svm_parameter()
        param.svm_type = SVC
        param.kernel_type = svmutil.LINEAR
        param.eps = 1
        param.probability = 0
        if noprint:
            param.print_func = noprint

        libsvm_model = svmutil.svm_train(prob, param)

        node = convert(libsvm_model, "LibSvmNuSvmcMultiRaw", [('input', FloatTensorType(shape=[1, 2]))])
        self.assertTrue(node is not None)
        X2 = numpy.vstack([X[:2], X[60:62], X[110:112], X[147:149]])  # 5x0, 5x1
        dump_data_and_model(X2.astype(numpy.float32), SkAPICl(libsvm_model), node,
                            basename="LibSvmSvmcRaw-Dec3", verbose=False,
                            allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.3')")
Example No. 14
    def test_convert_svmc_linear(self):
        iris = load_iris()

        X = iris.data[:, :2]
        y = iris.target
        y[y == 2] = 1

        prob = svmutil.svm_problem(y, X.tolist())

        param = svmutil.svm_parameter()
        param.svm_type = SVC
        param.kernel_type = svmutil.LINEAR
        param.eps = 1
        param.probability = 1
        if noprint:
            param.print_func = noprint

        libsvm_model = svmutil.svm_train(prob, param)

        node = convert(libsvm_model, "LibSvmSvmcLinear",
                       [('input', FloatTensorType())])
        self.assertTrue(node is not None)
        dump_data_and_model(
            X[:5].astype(numpy.float32),
            SkAPIClProba2(libsvm_model),
            node,
            basename="LibSvmSvmcLinear-Dec2",
            allow_failure=
            "StrictVersion(onnxruntime.__version__) < StrictVersion('0.5.0')")
Example No. 15
def printSvmValidationAccuracy(input, output):
	
	prob = svmutil.svm_problem(output, input)
	param = getSvmParam(True)
	
	accuracy = svmutil.svm_train(prob, param)
	return accuracy
Example No. 16
def CrossValidate(Y, X, param, k_folds=5):
    rand_idx = range(len(Y))
    random.shuffle(rand_idx)
    idx_groups = SplitIntoK(k_folds, rand_idx)
    pos_acc = 0
    neg_acc = 0
    for i in range(k_folds):
        test_idx = idx_groups[i]
        exclude_test = [idx_groups[j] for j in range(len(idx_groups)) if i != j]
        train_idx = list(chain(*exclude_test))

        Y_test = [Y[test_i] for test_i in test_idx]
        X_test = [X[test_i] for test_i in test_idx]        

        Y_train = [Y[train_i] for train_i in train_idx]
        X_train = [X[train_i] for train_i in train_idx]        

        # train on the k-1 training folds and evaluate on the held-out fold
        prob = svm.svm_problem(Y_train,X_train)
        svm_model = svm.svm_train(prob, param)

        p_labs, p_acc, p_vals = svm.svm_predict(Y_test, X_test, svm_model, '-q')

        tps = sum([1 for j in range(len(p_labs)) if (p_labs[j]==1 and Y_test[j]==1)])
        fns = sum([1 for j in range(len(p_labs)) if (p_labs[j]==-1 and Y_test[j]==1)])

        tns = sum([1 for j in range(len(p_labs)) if (p_labs[j]==-1 and Y_test[j]==-1)])
        fps = sum([1 for j in range(len(p_labs)) if (p_labs[j]==1 and Y_test[j]==-1)])

        pos_acc += tps / float(tps + fns)
        neg_acc += tns / float(tns + fps)

    pos_acc = pos_acc / k_folds
    neg_acc = neg_acc / k_folds
    return (pos_acc, neg_acc)
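
CrossValidate depends on a SplitIntoK helper that is not shown. A minimal sketch, assuming it simply deals the shuffled indices into k round-robin groups so every index lands in exactly one fold:

def SplitIntoK(k, idx_list):
    # Assumed behaviour, not the original helper: round-robin split into k groups.
    return [idx_list[i::k] for i in range(k)]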
Example No. 17
 def train(self,x,y):
     """
     training using y=list,x=dict
     parameter = string of parameters
     """
     prob=su.svm_problem(y,x)
     para=""
     para+= "-s %d -t %d -d %d -g %f -r %f -c %f -n %f -p %f -e %f -b %d" %\
         (
             self.type,
             self.kernel,
             self.degree,
             self.gamma,
             self.coef0,
             self.c,
             self.nu,
             self.p,
             self.eps,
             self.prob
         )
     if(self.v!=0):
         para+=" -v %d" % self.v
     if(self.q!=0):
         para+= " -q"
     print para
     para1=su.svm_parameter(para)
     self.model=su.svm_train(prob,para1)
     return True
Example No. 18
    def test_convert_svmc(self):
        iris = load_iris()

        X = iris.data[:, :2]
        y = iris.target
        y[y == 2] = 1

        prob = svmutil.svm_problem(y, X.tolist())

        param = svmutil.svm_parameter()
        param.svm_type = SVC
        param.kernel_type = svmutil.RBF
        param.eps = 1
        param.probability = 1
        if noprint:
            param.print_func = noprint

        libsvm_model = svmutil.svm_train(prob, param)

        node = convert(libsvm_model, "LibSvmSvmc",
                       [('input', FloatTensorType())])
        self.assertTrue(node is not None)
        dump_data_and_model(X[:5].astype(numpy.float32),
                            SkAPIClProba2(libsvm_model),
                            node,
                            basename="LibSvmSvmc-Dec2")
Example No. 19
def _lib_train_libsvm(user_tfidf, num_pos, num_neg, ignore):
    sparse_user_tfidf, num_pos, num_neg = _convert_to_sparse_matrix(user_tfidf, num_pos, num_neg, ignore)
    labels = ([1] * num_pos) + ([-1] * num_neg)

    param = svm_parameter("-t %d" % KERNEL_NUMBER)
    prob = svm_problem(labels, sparse_user_tfidf)
    modellog = svm_train(prob, param)
    return modellog
Example No. 20
 def train_fold(self, k, c):
     self.print_debug('train_fold', k, c)
     folder_name = 'fold_' + str(k) + '/'
     file_name = self.filename + '.data.svm'
     y, x = svmutil.svm_read_problem(folder_name + file_name)
     prob = svmutil.svm_problem(y, x)
     param = svmutil.svm_parameter('-s 0 -t 0 -c ' + str(c))
     m = svmutil.svm_train(prob, param)
     return m
Example No. 21
 def convert(self):
     count = True
     self.hist = []
     self.vector = []
     self.problem = []
     true = []
     for i in self.images:#all training histogram and vector data together
         for j in i.trainDataHist:
             self.hist.append(j)
             if count == True:
                 true.append(1)
             else:
                 true.append(0)
         for k in i.trainDataVector:
             self.vector.append(k)
         count = False
     self.problem.append(svmutil.svm_problem(true,self.hist))#hist
     self.problem.append(svmutil.svm_problem(true,self.vector))#vector
Example No. 22
 def svm_traing_process(self):
     features, labels = self.load_images_dataset(self.training_set_path)
     # train a SVM classifier
     features = map(list, features)
     prob = svmutil.svm_problem(labels, features)
     param = svmutil.svm_parameter(self.kernal_option)
     model = svmutil.svm_train(prob, param)
     res = svmutil.svm_predict(labels, features, model)
     return model
Example No. 23
 def train_fold_polynomial(self, k, c, g, d):
     self.print_debug('train_fold_polynomial', k, c, g, d)
     folder_name = 'fold_' + str(k) + '/'
     file_name = self.filename + '.data.svm'
     y, x = svmutil.svm_read_problem(folder_name + file_name)
     prob = svmutil.svm_problem(y, x, isKernel=True)
     param = svmutil.svm_parameter('-s 0 -t 1 -c ' + str(c)
                                   + ' -g ' + str(g) + ' -d ' + str(d))
     m = svmutil.svm_train(prob, param)
     return m
Example No. 24
def get_cross_val(x, y, x_val, y_val, gamma_c):
    prob  = svmutil.svm_problem(y, x)
    param = svmutil.svm_parameter('-t 2 -q -c {0} -g {1}'.format(gamma_c.C, gamma_c.gamma))
    m = svmutil.svm_train(prob, param)

    svmutil.svm_save_model("model", m)

    p_label_validation, p_acc_validation, p_val_validation = svmutil.svm_predict(y_val, x_val, m)

    return p_acc_validation[0]
Example No. 25
def trainSVM(trainMatrix, trainCategory):
    svm.svm_model.predict = lambda self, x: svm.svm_predict([0], [x], self)[0][0]

    prob = svm.svm_problem(trainCategory, trainMatrix)
    param = svm.svm_parameter()
    param.kernel_type = svm.LINEAR
    param.C = 10

    model = svm.svm_train(prob, param)
    return model
Example No. 26
 def __init__(self, classified_data_list, kernel):
     self.kernel = kernel
     data = np.empty([0, classified_data_list[0].shape[1]])
     labels = np.empty([0])
     for i, d in enumerate(classified_data_list):
         data = np.vstack((data, d))
         labels = np.append(labels, i * np.ones(d.shape[0]))
     problem = svmutil.svm_problem(labels.tolist(), data.tolist())
     kmap = {'poly': 1, 'rbf': 2, 'sigmoid': 3}
     param = svmutil.svm_parameter('-q -c {} -t {}'.format(C, kmap[kernel]))
     self.svm = svmutil.svm_train(problem, param)
Example No. 27
def getSVMAccuracy(trainingData):
	numOutputs = len(trainingData[0][1])
	for outputIndex in range(numOutputs):
		inputs = [input for (input, output) in trainingData]
		outputs = [output[outputIndex] for (input, output) in trainingData]
		
		prob = svmutil.svm_problem(outputs, inputs)
		param = svmAccuracy.getSvmParam(cross_validation_only = True)
		
		model = svmutil.svm_train(prob, param)
		print 'output index: %d - %s\n' % (outputIndex, {0 : "Index", 1: "Middle"}[outputIndex])
Example No. 28
    def _stop_training(self):
        super(LibSVMClassifier, self)._stop_training()
        self.normalizer = _LabelNormalizer(self.labels)

        labels = self.normalizer.normalize(self.labels.tolist())
        features = self.data

        # Call svm training method.
        prob = libsvmutil.svm_problem(labels, features.tolist())
        # Train
        self.model = libsvmutil.svm_train(prob, self.parameter)
Example No. 29
    def _stop_training(self):
        super(LibSVMClassifier, self)._stop_training()
        self.normalizer = _LabelNormalizer(self.labels)
                
        labels = self.normalizer.normalize(self.labels.tolist())
        features = self.data

        # Call svm training method.
        prob = libsvmutil.svm_problem(labels, features.tolist())
        # Train
        self.model = libsvmutil.svm_train(prob, self.parameter)
Example No. 30
def train_grasp(grasp_type, side):
    """
    train_grasp(grasp_type): 
    train linear svm classifier for specific grasp type\n
    grasp_type: hand grasping type\n
    side: left hand or right hand\n
    """
    #train
    datafile = "model/traindata_grasp_" + grasp_type + "_" + side
    if not os.path.isfile(datafile):
        srcfile = "data/feature_grasp_train.csv"
        write_svmdata_grasp(srcfile, datafile, grasp_type, side, 0)
    label_train, data_train = svmutil.svm_read_problem(datafile)
    modelfile = "model/model_grasp_" + grasp_type + "_" + side
    m = []
    if not os.path.isfile(modelfile):
        print("train model: " + grasp_type + "_" + side)
        label_weight = {}
        for v in label_train:
            if label_weight.has_key(v):
                label_weight[v] += 1
            else:
                label_weight[v] = 1
        sorted_label = sorted(label_weight)
        param_weight = ' '
        for v in sorted_label:
            label_weight[v] = float(
                len(label_train)) / len(sorted_label) / label_weight[v]
            param_weight += '-w%d %f ' % (v, label_weight[v])
        prob = svmutil.svm_problem(label_train, data_train)
        param = svmutil.svm_parameter('-t 0 -b 1 -q' + param_weight)
        print '-t 0 -b 1 -q' + param_weight
        #        param = svmutil.svm_parameter('-t 0 -c 4 -b 1 -q')
        m = svmutil.svm_train(prob, param)
        svmutil.svm_save_model(modelfile, m)
    else:
        print("load model: " + grasp_type + "_" + side)
        m = svmutil.svm_load_model(modelfile)
    #test
    grasp_info = read_info("data/feature_grasp_test.csv", side)
    datafile = "model/testdata_grasp_" + grasp_type + "_" + side
    if not os.path.isfile(datafile):
        srcfile = "data/feature_grasp_test.csv"
        write_svmdata_grasp(srcfile, datafile, grasp_type, side, 1)
    label_test, data_test = svmutil.svm_read_problem(datafile)
    p_label, p_acc, p_val = svmutil.svm_predict(label_test, data_test, m,
                                                '-b 1')
    f_result = open("result/grasp_" + grasp_type + "_" + side + ".csv", "w")
    for i in range(len(p_label)):
        f_result.write(grasp_info[i] + ", " + str(int(label_test[i])) + ", " +
                       str(int(p_label[i])) + ", ")
        f_result.write("[%.4f]\n" % p_val[i][0])
    f_result.close()
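
The weighting loop above assigns each class an inverse-frequency weight, len(label_train) / n_classes / count(label), so a perfectly balanced set would get weight 1.0 for every class. A small standalone check of that arithmetic with a made-up label list:

# Toy labels for illustration only (6 of class 1, 2 of class 2).
label_train = [1, 1, 1, 1, 1, 1, 2, 2]

label_weight = {}
for v in label_train:
    label_weight[v] = label_weight.get(v, 0) + 1

param_weight = ' '
for v in sorted(label_weight):
    # 8 / 2 / 6 ~= 0.667 for class 1, 8 / 2 / 2 = 2.0 for class 2
    label_weight[v] = float(len(label_train)) / len(label_weight) / label_weight[v]
    param_weight += '-w%d %f ' % (v, label_weight[v])

print(param_weight)   # ' -w1 0.666667 -w2 2.000000 '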
Example No. 31
def multiclass_train(valid_labels, labels, data, svm_parameters=None):
    if svm_parameters == None:
        # make default empty parameters 
        svm_parameters = []
        for i in valid_labels:
            svm_parameters.append(svmutil.svm_parameter())
    models = []
    for i in valid_labels:
        oaa_labels = relabel_one_against_all(labels, i)
        prob = svmutil.svm_problem(oaa_labels, data)
        model = svmutil.svm_train(prob, svm_parameters[i])
        models.append(model)
    return models
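
multiclass_train relies on relabel_one_against_all, which is not shown. A minimal sketch, assuming it performs the usual one-vs-all relabelling (+1 for the chosen label, -1 for everything else):

def relabel_one_against_all(labels, positive_label):
    # Assumed behaviour, not the original helper.
    return [1 if lab == positive_label else -1 for lab in labels]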
Example No. 32
 def __init__(self, data=[], labels=[], kernel=svmutil.RBF, c=10):
     self.__svmparam__             = svmutil.svm_parameter('-q')
     self.__svmparam__.kernel_type = kernel
     self.__svmparam__.C           = c
     self.c                        = c
     self.data                     = data
     self.labels                   = labels
     if len(data) > 0:
         self.problem = svmutil.svm_problem(labels,data)
         self.model   = svmutil.svm_train(self.problem,self.__svmparam__,'-q')
     else:
         self.problem = None
         self.model   = None
Example No. 33
def SVM(X, Y, gamma):
    X = X.tolist()
    Y = Y.tolist()
    prob = svm_problem(Y, X)
    param = svm_parameter('-g ' + str(gamma) + ' -t 2 -c 1e6')  # hard margin
    m = svm_train(prob, param)

    # return model
    def g(X):
        N = X.shape[0]
        Y = np.zeros(N)
        return np.array(
            svm_predict(Y.tolist(), X.tolist(), m, options='-q')[0])

    return g
Example No. 34
def train_manipulation(mnp_type):
    """  
    """
    #train
    datafile = "manipulate/model/traindata_mnp_"+mnp_type
    if not os.path.isfile(datafile):
        srcfile = "manipulate/data/feature_mnp_train.csv"
        write_svmdata_mnp(srcfile, datafile, mnp_type, 0)    
    label_train,data_train = svmutil.svm_read_problem(datafile)
    modelfile = "manipulate/model/model_mnp_"+mnp_type
    m = []
    if not os.path.isfile(modelfile):
        print("train model: " + mnp_type)
        label_weight = {}
        for v in label_train:
            if label_weight.has_key(v):
                label_weight[v]+=1
            else:
                label_weight[v]=1
        sorted_label = sorted(label_weight)
        param_weight = ' '
        for v in sorted_label:
            label_weight[v] = float(len(label_train))/len(sorted_label)/label_weight[v]
            param_weight += '-w%d %f ' % (v, label_weight[v])
        prob = svmutil.svm_problem(label_train, data_train)        
        param = svmutil.svm_parameter('-t 0 -b 1 -q'+param_weight)
        print '-t 0 -b 1 -q'+param_weight
        m = svmutil.svm_train(prob, param)        
        svmutil.svm_save_model(modelfile, m)
    else:
        print("load model: " + mnp_type)
        m = svmutil.svm_load_model(modelfile)
#    weight = read_model_linearSVM(modelfile, len(data_train[0]))
#    print weight
    #test    
    mnp_info = read_info("manipulate/data/feature_mnp_test.csv")
    datafile = "manipulate/model/testdata_mnp_"+mnp_type
    if not os.path.isfile(datafile):
        srcfile = "manipulate/data/feature_mnp_test.csv"
        write_svmdata_mnp(srcfile, datafile, mnp_type, 1)    
    label_test,data_test = svmutil.svm_read_problem(datafile)
    p_label, p_acc, p_val = svmutil.svm_predict(label_test, data_test, m, '-b 1')
    f_result = open("manipulate/result/mnp_" + mnp_type + ".csv", "w")
    for i in range(len(p_label)):
        f_result.write(mnp_info[i]+", "+str(int(label_test[i]))+", "+str(int(p_label[i]))+", ")
        f_result.write("[%.4f]\n" % p_val[i][0])
    f_result.close()
Example No. 35
def run_svm_model(dbCur,trainY,trainX,testY,testX,time_stamp,round,num_cutoff_points):
    trainY = (2*trainY-1).tolist(); testYSVM = (2*testY-1).tolist() # Convert labels to -1,+1 format and lists as required by libsvm.
    trainY = [i[0] for i in trainY]; testYSVM = [i[0] for i in testYSVM] # Convert to list of floats from list of lists.
    trainX = trainX.tolist(); testX = testX.tolist() # Convert to list as required by libsvm.
    prob = svm.svm_problem(trainY,trainX)
    params = svm.svm_parameter('-b 1 -q')
    svmmodel = svm.svm_train(prob,params)
    p_label, p_acc, p_val = svm.svm_predict(testYSVM,testX,svmmodel,'-b 1')
    probs = n.array(p_val)[:,1]; probs.shape = (len(probs),1)
    performance_results,valid_ind = m.compute_pred_performance_curve(simple_comp,testY,probs,None,num_cutoff_points)
    for idx in range(num_cutoff_points):
        if valid_ind[0,idx] == 1:
            recall = performance_results[0,idx]; precision = performance_results[1,idx]
        else:
            recall = None; precision = None
        dbCur.execute("INSERT INTO snp_results_detail (time_stamp,model_used,fold_no,seq_no,val_recall,val_precision) \
              VALUES(timestamp %s,'svm',%s,%s,%s,%s);",[time_stamp,round+1,idx,recall,precision])
Example No. 36
    def setUpClass(self):
        """
        Set up the unit test by loading the dataset and training a model.
        """
        if not HAS_SKLEARN:
            return
        if not HAS_LIBSVM:
            return

        scikit_data = load_boston()
        prob = svmutil.svm_problem(scikit_data['target'], scikit_data['data'].tolist())
        param = svmutil.svm_parameter()
        param.svm_type = svmutil.NU_SVR
        param.kernel_type = svmutil.LINEAR
        param.eps = 1

        self.libsvm_model = svmutil.svm_train(prob, param)
Example No. 37
    def train(self, learning_set):
        """Train the classifier with a list of character objects that have
        known values."""
        classes = []
        features = []
        l = len(learning_set)

        for i, char in enumerate(learning_set):
            if self.verbose:
                print 'Found "%s"  --  %d of %d (%d%% done)' % (char.value, i + 1, l, round(100 * (i + 1) / l))
            classes.append(float(ord(char.value)))
            # features.append(char.get_feature_vector())
            char.get_single_cell_feature_vector(self.neighbours)
            features.append(char.feature)

        problem = svm_problem(classes, features)
        self.model = svm_train(problem, self.param)
Example No. 38
    def train(self, learning_set):
        """Train the classifier with a list of character objects that have
        known values."""
        classes = []
        features = []
        l = len(learning_set)

        for i, char in enumerate(learning_set):
            if self.verbose:
                print 'Found "%s"  --  %d of %d (%d%% done)' \
                    % (char.value, i + 1, l, round(100 * (i + 1) / l))
            classes.append(float(ord(char.value)))
            #features.append(char.get_feature_vector())
            char.get_single_cell_feature_vector(self.neighbours)
            features.append(char.feature)

        problem = svm_problem(classes, features)
        self.model = svm_train(problem, self.param)
Example No. 39
	def cSvmTrainSet(self):
		dataMat = []
		labelMat = []
		file_pattern = re.compile('^%s-\d.rec' % self.legalName)
		for fdata in os.listdir('data'):
			if file_pattern.match(fdata):
				data,label = loadDataSet('data/'+fdata,1)
			else:
				data,label = loadDataSet('data/'+fdata,-1)
			dataMat+=data
			labelMat+=label
		libSvmFormatSaveInFile(dataMat,labelMat,'data_format/%s.mat' % self.legalName) # TODO: multithreading
		y,x = svmutil.svm_read_problem('data_format/%s.mat' % self.legalName)
		prob = svmutil.svm_problem(y,x,isKernel = True)
		param = svmutil.svm_parameter('-t 0 ')
		self.model = svmutil.svm_train(prob,param)

		print self.model
Example No. 40
    def cSvmTrainSet(self):
        dataMat = []
        labelMat = []
        file_pattern = re.compile('^%s-\d.rec' % self.legalName)
        for fdata in os.listdir('data'):
            if file_pattern.match(fdata):
                data, label = loadDataSet('data/' + fdata, 1)
            else:
                data, label = loadDataSet('data/' + fdata, -1)
            dataMat += data
            labelMat += label
        libSvmFormatSaveInFile(dataMat, labelMat, 'data_format/%s.mat' %
                               self.legalName)  # TODO: multithreading
        y, x = svmutil.svm_read_problem('data_format/%s.mat' % self.legalName)
        prob = svmutil.svm_problem(y, x, isKernel=True)
        param = svmutil.svm_parameter('-t 0 ')
        self.model = svmutil.svm_train(prob, param)

        print self.model
Example No. 41
    def _evaluation_test_helper_no_probability(self, labels, allow_slow):
        # Generate some random data.
        # This unit test should not rely on scikit learn for test data.
        x, y = [], []
        random.seed(42)
        for _ in range(50):
            x.append([
                random.gauss(200, 30),
                random.gauss(-100, 22),
                random.gauss(100, 42)
            ])
            y.append(random.choice(labels))
        # make sure first label is seen first, second is seen second, and so on.
        for i, val in enumerate(labels):
            y[i] = val
        column_names = ["x1", "x2", "x3"]
        prob = svmutil.svm_problem(y, x)

        df = pd.DataFrame(x, columns=column_names)

        for param1 in self.non_kernel_parameters:
            for param2 in self.kernel_parameters:
                param_str = " ".join([self.base_param, param1, param2])
                print("PARAMS: ", param_str)
                param = svm_parameter(param_str)

                model = svm_train(prob, param)

                # Get predictions with probabilities as dictionaries
                (df["prediction"], _, _) = svm_predict(y, x, model, " -q")

                spec = libsvm.convert(model, column_names, "target")

                if _is_macos() and _macos_version() >= (10, 13):
                    metrics = evaluate_classifier(spec, df, verbose=False)
                    self.assertEquals(metrics["num_errors"], 0)

                if not allow_slow:
                    break

            if not allow_slow:
                break
Example No. 42
    def test_convert_svmr_linear(self):
        iris = load_iris()

        X = iris.data[:, :2]
        y = iris.target
        prob = svmutil.svm_problem(y, X.tolist())

        param = svmutil.svm_parameter()
        param.svm_type = SVR
        param.kernel_type = svmutil.LINEAR
        param.eps = 1
        if noprint:
            param.print_func = noprint

        libsvm_model = svmutil.svm_train(prob, param)

        node = convert(libsvm_model, "LibSvmSvmrLinear", [('input', FloatTensorType(shape=[1, 'None']))])
        self.assertTrue(node is not None)
        dump_data_and_model(X[:5].astype(numpy.float32), SkAPIReg(libsvm_model), node,
                            basename="LibSvmSvmrLinear-Dec3")
Example No. 43
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-r', '--resdir', type=str, required=True,
            help="Results directory")
    parser.add_argument('-f', '--feature', type=str, required=True,
            help='feature to use to learn')
    args = parser.parse_args()
    SCORES_FPATH = os.path.join(args.resdir, 'scores.txt')
    FEAT_DIR = os.path.join(args.resdir, 'features', args.feature)

    scores = np.fromfile(SCORES_FPATH, sep='\n')
    feats = []
    for i in range(1, len(scores) + 1):
        feats.append(np.fromfile(os.path.join(FEAT_DIR, str(i) + '.txt'), sep='\n').tolist())
#   feats = np.array(feats)
    print('Read all features')
    params = svmutil.svm_parameter('-s 4 -t 2')
    model = svmutil.svm_train(svmutil.svm_problem(scores, feats), params)
    svmutil.svm_save_model(os.path.join(args.resdir, 'svr.model'), model)
    print svmutil.svm_predict(scores, feats, model)
Example No. 44
def compute_auc(gram_matrix, data, k=10, C=1.0):

    kv = cross_validation.StratifiedKFold(labels, n_folds=k)
    s = 0.0

    for train_index, test_index in kv:

        gm_train = gram_matrix[train_index, :]
        gm_train = gm_train[:, train_index]
        data_train = data[train_index]

        # libSVM wants the distances from test instances to all train instances as input
        # see http://stackoverflow.com/questions/10978261/libsvm-precomputed-kernels
        gm_test = gram_matrix[test_index, :]
        gm_test = gm_test[:, train_index] #!
        data_test = data[test_index]

        # Have to use libsvm directly here, because of a bug in sklearn with precomputed gram matrices
        x = []
        for i in range(len(gm_train)):
            l = gm_train[i].tolist()
            l.insert(0, i + 1)
            x.append(l)

        prob = svmutil.svm_problem(data_train.tolist(), x, isKernel=True)
        param = svmutil.svm_parameter("-t 4 -c %.410f -q" % C)
        m = svmutil.svm_train(prob, param)

        xx = []
        for i in range(len(gm_test)):
            t = gm_test[i].tolist()
            t.insert(0, i + 1)
            xx.append(t)

        p_label, p_acc, p_val = svmutil.svm_predict(data_test.tolist(), xx, m)

        fpr, tpr, thresholds = roc_curve(data_test, p_val, pos_label=1.0)
        AUC = roc_auc_score(data_test, p_val)
        s += AUC

    return s / k
Example No. 45
def main(path):
	
	label = []
	points = []
	for u in os.listdir(path): 
		if u[-2:] == 'WC':
			
			filePath = path+u
			WC = pickle.load(open(filePath, 'rb'))
			label.append(u[1])
			points.append(WC)
	label = [int(i) for i in label]
	
	prob = svmutil.svm_problem(label, points)
	param = svmutil.svm_parameter('-t 0 -c 4 -b 1')
	
	m = svmutil.svm_train(prob, param)
	svmutil.svm_save_model('n.model', m)
	
	p_label, p_acc, p_val = svmutil.svm_predict(label, points, m, '-b 1')
	
	return p_acc
Example No. 46
def prob2_to_4():
    x = np.array([
        [ 1,  0],
        [ 0,  1],
        [ 0, -1],
        [-1,  0],
        [ 0,  2],
        [ 0, -2],
        [-2,  0]
    ])
    y = np.array([-1,-1,-1,1,1,1,1])

    print "===prob 2==="
    xf = np.fliplr(x.copy())
    print xf*xf - 2*x + np.array([[3,-3]])

    print "===prob 3==="
    prob  = SVM.svm_problem(y.tolist(), x.tolist())
    param = SVM.svm_parameter('-t 1 -c 100 -d 2 -r 1 -g 1') # very large C for hard margin
    m = SVM.svm_train(prob, param)
    sumA = 0
    poly = [0] * 6 # xx, xy, yy, x, y, 1
    for i in xrange(m.l):
        idx = m.sv_indices[i]
        alphay = m.sv_coef[0][i]
        alpha = abs(m.sv_coef[0][i])
        print "{:d} {:+1.2f}".format(idx, alpha)
        sumA += alpha
        v = x[idx-1]
        poly[0] += alphay*v[0]*v[0]
        poly[1] += alphay*v[1]*v[0]*2
        poly[2] += alphay*v[1]*v[1]
        poly[3] += alphay*v[0]*2
        poly[4] += alphay*v[1]*2
        # poly[5] += alphay*1 # no need because Sum(alphay) = 0
    poly[5] -= m.rho[0]
    print "Sum of alpha is {:1.3f}\nb = {}".format(sumA, m.rho[0])
    print "{:+2.2f}xx {:+2.2f}xy {:+2.2f}yy {:+2.2f}x {:+2.2f}y {:+2.2f}".format(*poly)
Example No. 47
def train_attribute(attribute, side):
    """
    train_attribute(str, float): 
    train linear svm classifier for specific attribute\n
    attribute: should be one from ["prismatic", "sphere", "flat", "rigid"]
    """
    #train
    datafile = "model/traindata_attribute_"+attribute+"_"+side
    if not os.path.isfile(datafile):
        srcfile = "data/feature_attribute_train.csv"
        write_svmdata_attribute(srcfile, datafile, attribute, side, 0)    
    label_train,data_train = svmutil.svm_read_problem(datafile)    
    modelfile = "model/model_attribute_"+attribute+"_"+side
    m = []
    if not os.path.isfile(modelfile):
        print("train model: " + attribute+"_"+side)
        prob = svmutil.svm_problem(label_train, data_train)
        param = svmutil.svm_parameter('-t 0 -c 4 -b 1 -q')
        m = svmutil.svm_train(prob, param)        
        svmutil.svm_save_model(modelfile, m)
    else:
        print("load model: " + attribute+"_"+side)
        m = svmutil.svm_load_model(modelfile)
    #test
    attribute_info = read_info("data/feature_attribute_test.csv", side)
    datafile = "model/testdata_attribute_"+attribute+"_"+side
    if not os.path.isfile(datafile):
        srcfile = "data/feature_attribute_test.csv"
        write_svmdata_attribute(srcfile, datafile, attribute, side, 1)    
    label_test,data_test = svmutil.svm_read_problem(datafile)
    p_label, p_acc, p_val = svmutil.svm_predict(label_test, data_test, m, '-b 1')
    [precision, recall, f1, accuracy] = getF1(label_test, p_label)
    print "F1: [%.4f, %.4f, %.4f] Accuracy: %.4f" % (precision, recall, f1, accuracy)
    f_result = open("result/attribute_"+attribute+"_"+side+".csv", "w")
    for i in range(len(p_label)):
        f_result.write(attribute_info[i]+", "+str(int(label_test[i]))+", "+str(int(p_label[i]))+", ")
        f_result.write("[%.4f]\n" % p_val[i][0])
    f_result.close()
Example No. 48
def create_svm_problem(feats, labelledRects):
    labels = []
    values = []

    height, width, nd = feats.shape

    for rect in labelledRects:
        x0, y0, x1, y1 = rect.box
        if x0 >= width or y0 >= height:
            continue
        if x1 >= width:
            x1 = width - 1
        if y1 >= height:
            y1 = height - 1
        for x in range(x0, x1 + 1):
            for y in range(y0, y1 + 1):
                vs = []
                for i in feats[y][x]:
                    vs.append(i)
                labels.append(rect.label)
                values.append(vs)

    return svmutil.svm_problem(labels, values)
Example No. 49
def bench_svm(X, Y):
    """
    bench with swig-generated wrappers that come with libsvm
    """

    import svmutil

    X1 = X.tolist()
    Y1 = Y.tolist()

    gc.collect()

    # start time
    tstart = datetime.now()
    problem = svmutil.svm_problem(Y1, X1)
    param = svmutil.svm_parameter()
    param.svm_type=0
    param.kernel_type=2
    model = svmutil.svm_train(problem, param)
    svmutil.svm_predict([0]*len(X1), X1, model)
    delta = (datetime.now() - tstart)
    # stop time
    svm_results.append(delta.seconds + delta.microseconds/mu_second)
Example No. 50
def bench_svm(X, Y):
    """
    bench with swig-generated wrappers that come with libsvm
    """

    import svmutil

    X1 = X.tolist()
    Y1 = Y.tolist()

    gc.collect()

    # start time
    tstart = datetime.now()
    problem = svmutil.svm_problem(Y1, X1)
    param = svmutil.svm_parameter()
    param.svm_type = 0
    param.kernel_type = 2
    model = svmutil.svm_train(problem, param)
    svmutil.svm_predict([0] * len(X1), X1, model)
    delta = (datetime.now() - tstart)
    # stop time
    svm_results.append(delta.seconds + delta.microseconds / mu_second)
Example No. 51
def getSVMValidationPredictions(input, output, folds):
	n = len(input)
	predictions = [None] * n
	
	for fold in range(folds):
		testIn = [input[x] for x in range(n) if x % folds == fold]
		testOut = [output[x] for x in range(n) if x % folds == fold]
		
		trainIn = [input[x] for x in range(n) if x % folds != fold]
		trainOut = [output[x] for x in range(n) if x % folds != fold]
		
		testIndeces = range(fold, n, folds)
		
		#print trainIn[0], trainOut[0]
		prob = svmutil.svm_problem(trainOut, trainIn)
		param = getSvmParam()
		model = svmutil.svm_train(prob, param)
		
		labels, acc, vals = svmutil.svm_predict([0] * len(testOut), testIn, model)
		for index, label in zip(testIndeces, labels):
			predictions[index] = label
			
	return predictions
Example No. 52
def svm_learning_curve(x, y):
    m = len(y)
    n = len(x)
    steep = m / 100;

    training_examples = []
    train_accuracy = []
    validation_accuracy = []

    for i in range(steep, m, steep):
        prob  = svmutil.svm_problem(y[:i], x[:i])
        param = svmutil.svm_parameter('-t 2 -q -c 0.01')
        m = svmutil.svm_train(prob, param)
        
        p_label_train, p_acc_train, p_val_train = svmutil.svm_predict(y[:i], x[:i], m)
        p_label_validation, p_acc_validation, p_val_validation = svmutil.svm_predict(y[i:], x[i:], m)
        print p_acc_train[0], "\t", p_acc_validation[0], "\n"

        training_examples.append(i)
        train_accuracy.append(p_acc_train[0])
        validation_accuracy.append(p_acc_validation[0])

    return training_examples, train_accuracy, validation_accuracy
Example No. 53
def create_svm_problem(feats, labelledRects):
    labels = []
    values = []

    height, width, nd = feats.shape

    for rect in labelledRects:
        x0, y0, x1, y1 = rect.box
        if x0 >= width or y0 >= height:
            continue
        if x1 >= width:
            x1 = width - 1
        if y1 >= height:
            y1 = height - 1
        for x in range(x0, x1 + 1):
            for y in range(y0, y1 + 1):
                vs = []
                for i in feats[y][x]:
                    vs.append(i)
                labels.append(rect.label)
                values.append(vs)

    return svmutil.svm_problem(labels, values)
Example No. 54
def TrainSvmPoly2(Y, X, sweep_c=range(-2,18)):
    num_positives = float(Y.count(1))
    num_negatives = float(Y.count(-1))

    best_c = -1
    best_acc = -1
    for c_pow in sweep_c:
        current_c = np.power(2.0,c_pow)
        param = svm.svm_parameter('-t 1 -d 2 -c %f -w-1 %f -w1 %f -q' % (current_c,
                                                                              100/num_negatives,
                                                                              100/num_positives))
        current_pos_acc, current_neg_acc = CrossValidate(Y, X, param)
        current_acc = current_pos_acc
        print '%f, %f, %f' % (current_c, current_acc, current_neg_acc)
        if best_acc < current_acc:
            best_acc = current_acc
            best_c = current_c

    prob = svm.svm_problem(Y,X)
    param = svm.svm_parameter('-t 1 -d 2 -c %f -w-1 %f -w1 %f -q' % (best_c, 100/num_negatives,
                                                                     100/num_positives))
    svm_model = svm.svm_train(prob, param)
    p_labs, p_acc, p_vals = svm.svm_predict(Y, X, svm_model, '-q')
    return svm_model
Example No. 55
def compute_auc_kern(data, labels, k=10, C=1.0, kern=6, gamma=None):
    ''' This is an interface to the extended libsvm implementation
    with new kernels '''

    kv = cross_validation.StratifiedKFold(labels, n_folds=k, random_state=1)
    s = 0.0

    for train_index, test_index in kv:

        data_train = data[train_index]
        labels_train = labels[train_index]

        data_test = data[test_index]
        labels_test = labels[test_index]

        y_train = labels_train.tolist()
        x_train = data_train.tolist()
        
        prob = svmutil.svm_problem(y_train, x_train)
        if gamma != None:
            param = svmutil.svm_parameter("-t %i -c %.410f -q -g %i" % (kern, C, gamma))
        else:
            param = svmutil.svm_parameter("-t %i -c %.410f -q" % (kern, C))

        model = svmutil.svm_train(prob, param)

        y_test = labels_test.tolist()
        x_test = data_test.tolist()
        
        p_label, p_acc, p_val = svmutil.svm_predict(y_test, x_test, m=model)

        fpr, tpr, thresholds = roc_curve(labels_test, p_val, pos_label=-1.0)
        AUC = roc_auc_score(labels_test, p_val)
        s += AUC

    return s / k
Example No. 56
    ##### RBF level 1 grid #####
    # param = SVM.svm_parameter('-t 2 -c 1.126    -g 11.26    -h 0') # 75%

    # param = SVM.svm_parameter('-t 2 -c 112.6    -g 1.126    -h 0') # 87.5%
    # param = SVM.svm_parameter('-t 2 -c 11.26    -g 1.126    -h 0') # 87.9%
    # param = SVM.svm_parameter('-t 2 -c 1.126    -g 1.126    -h 0') # 86%
    # param = SVM.svm_parameter('-t 2 -c 0.1126   -g 1.126    -h 0') # 79%
    # param = SVM.svm_parameter('-t 2 -c 0.01126  -g 1.126    -h 0') # %

    # param = SVM.svm_parameter('-t 2 -c 112.6    -g 0.1126   -h 0') # 83.9%
    # param = SVM.svm_parameter('-t 2 -c 11.26    -g 0.1126   -h 0') # 83.9
    # param = SVM.svm_parameter('-t 2 -c 1.126    -g 0.1126   -h 0') # 82%
    # param = SVM.svm_parameter('-t 2 -c 0.1126   -g 0.1126   -h 0') # 64%
    # param = SVM.svm_parameter('-t 2 -c 0.01126  -g 0.1126   -h 0') #

    ##### RBF level 2 grid #####
    # param = SVM.svm_parameter('-t 2 -c 2  -g 1 -h 0') # 87.5%
    # param = SVM.svm_parameter('-t 2 -c 5  -g 1 -h 0') # 88.06%
    # param = SVM.svm_parameter('-t 2 -c 10 -g 1 -h 0') # 87.9

    ##### RBF level 3 grid #####
    # param = SVM.svm_parameter('-t 2 -c 5  -g 0.2 -h 0') #
    # param = SVM.svm_parameter('-t 2 -c 5  -g 0.5 -h 0') # 86%
    # param = SVM.svm_parameter('-t 2 -c 5  -g 1   -h 0') # 88.06%
    param = SVM.svm_parameter('-t 2 -c 5  -g 2   -h 0')  # 88.06%
    # param = SVM.svm_parameter('-t 2 -c 5  -g 5   -h 0') # 86.55%

    problem = SVM.svm_problem(labTrn.tolist(), datTrn.tolist())
    model = SVM.svm_train(problem, param)
    SVM.svm_predict(labVal.tolist(), datVal.tolist(), model)
Example No. 57
        
        p_label_train, p_acc_train, p_val_train = svmutil.svm_predict(y[:i], x[:i], m)
        p_label_validation, p_acc_validation, p_val_validation = svmutil.svm_predict(y[i:], x[i:], m)
        print p_acc_train[0], "\t", p_acc_validation[0], "\n"

        training_examples.append(i)
        train_accuracy.append(p_acc_train[0])
        validation_accuracy.append(p_acc_validation[0])

    return training_examples, train_accuracy, validation_accuracy

def get_cross_val(x, y, x_val, y_val, gamma_c):
    prob  = svmutil.svm_problem(y, x)
    param = svmutil.svm_parameter('-t 2 -q -c {0} -g {1}'.format(gamma_c.C, gamma_c.gamma))
    m = svmutil.svm_train(prob, param)

    svmutil.svm_save_model("model", m)

    p_label_validation, p_acc_validation, p_val_validation = svmutil.svm_predict(y_val, x_val, m)

    return p_acc_validation[0]


if __name__ == '__main__':
    y, x = svmutil.svm_read_problem("char_recon_shuffled.db")
    gamma = 1.0 / (2.0 * (3.0 ** 7) ** 2)
    C = 3.0 ** 3.0
    prob  = svmutil.svm_problem(y, x)
    param = svmutil.svm_parameter('-t 2 -q -c {0} -g {1}'.format(C, gamma))
    m = svmutil.svm_train(prob, param)
    svmutil.svm_save_model("model", m)
Example No. 58
    def _complete_training(self, debug=False):
        """ Iterate over the complete data to get the initial model """
        ########## read complexities file if given ##########
        if self.complexities_path is not None:
            import yaml
            complexities_file=open(self.complexities_path, 'r')
            complexities = yaml.load(complexities_file)
            # nr of channels    = nr of features (==dim) / features_per_channel
            if not 'features_per_channel' in complexities:
                complexities['features_per_channel'] = 1
            self.complexity = complexities[
                    round(self.dim/complexities['features_per_channel'])]
            self._log("Read complexity %s from file. Dimension is %s" %
                      (self.complexity, self.dim), level=logging.INFO)
            
        # not compatible with regression!
            # self._log("Instances of Class %s: %s, %s: %s" \
            #            % (self.classes[0], 
            #               self.labels.count(self.classes.index(self.classes[0])),
            #               self.classes[1], 
            #               self.labels.count(self.classes.index(self.classes[1]))))
        # instead this?:
        self._log("Performing training of SVM.")
        
        ########## Calculation of default gamma ##########
        self.calculate_gamma()

        self.num_samples = len(self.samples)

        # nr_weight is the number of elements in the array weight_label and
        # weight. Each weight[i] corresponds to weight_label[i], meaning that
        # the penalty of class weight_label[i] is scaled by a factor of 
        # weight[i]. If you do not want to change penalty for any of the 
        # classes, just set nr_weight to 0.
        
        ########## preparation of the libsvm command ##########
        # for probability output add "-b 1" to options
        options = \
            "-c %.42f -d %d -g %.42f -r %.42f -n %.42f -p %.42f -e %.20f -m %.42f" % \
            (self.complexity, self.exponent, self.gamma,
            self.offset, self.nu, self.epsilon, self.tolerance, 1000)
            # use 1000MB instead of 100MB (default)
        # options += " -b 1" un-comment this for probabilistic output!
        if self.multinomial:
            options += " -b 1"
        for i,w in enumerate(self.weight):
            options += " -w%d %.42f" % (i, w)
        if self.kernel_type == 'LINEAR':
            options += " -t 0"
        elif self.kernel_type == 'POLY':
            options += " -t 1"
        elif self.kernel_type == 'RBF':
            options += " -t 2"
        elif self.kernel_type == 'SIGMOID':
            options += " -t 3"
        else:
            self.kernel_type = 'LINEAR'
            options += " -t 0"
            warnings.warn("Kernel unknown! Precomputed Kernels are not " +
                          "yet implemented. Linear Kernel used.")
            # PRECOMPUTED: kernel values in training_set_file 
            #              (not yet implemented)

        if self.svm_type == 'C-SVC':
            options += " -s 0"
        elif self.svm_type == 'nu-SVR':
            options += " -s 1"
        elif self.svm_type == 'one-class SVM':
            options += " -s 2"
        elif self.svm_type == 'epsilon-SVR':
            options += " -s 3"
        else:
            options += " -s 0"
            self.svm_type = 'C-SVC'
            warnings.warn("SVM-type unknown. C-SVC will be used!")
        if not self.debug:
            options += " -q"
            self._log("Libsvm is now quiet!")
        
        old_libsvm_options = options
        
        if self.max_iterations != 0:
            options += " -i %d" % self.max_iterations
        try:
            param = svmutil.svm_parameter(options)
        except ValueError:
            param = svmutil.svm_parameter(old_libsvm_options)
            self._log(
                "Using max_iterations is not supported by the standard " +
                "LIBSVM. Change your Python path to our customized version!",
                level=logging.CRITICAL)

        # transform labels with *label_function*
        if self.str_label_function is not None:
            self.label_function = eval(self.str_label_function)
            self.labels = self.label_function(self.labels)
        
        # build the classifier
        # h = [map(float,list(data)) for data in self.samples]
        problem = svmutil.svm_problem(self.labels, [
            map(float, list(data)) for data in self.samples])
        model = svmutil.svm_train(problem, param)
        if not self.multinomial:
            if (self.svm_type == 'C-SVC' or self.svm_type == 'one-class SVM') \
                    and self.kernel_type == 'LINEAR':
                self.calculate_classification_vector(model)
                if self.debug:
                    # This calculation is needed for further analysis
                    self.calculate_slack_variables(model) 
                    print "LIBSVM Parameter:"
                    self.print_variables()
            else:
                # Slack variables are the same no matter which kernel is used
                # This method is mainly used to reduce the number of samples
                # being stored later on.
                if self.debug:
                    self.calculate_slack_variables(model) 
                self.model = model
        else:
            self.model = model
            # Slack variables are the same no matter which kernel is used
            # This method is mainly used to reduce the number of samples
            # being stored later on.
        
        # read number of iterations needed to solve the problem
        if self.max_iterations != 0:
            try:
                predictor_iterations = model.get_num_iterations()
                self.classifier_information["~~Solver_Iterations~~"] = \
                    predictor_iterations
                if predictor_iterations == 0 or \
                        predictor_iterations == numpy.Inf:
                    self.classifier_information["~~SVM_Converged~~"] = False
                else:
                    self.classifier_information["~~SVM_Converged~~"] = True
            except:
                warnings.warn("Could not read state of the LibSVM Solver " +
                              "from the C-Library!")

        try:
            self.classifier_information["~~offset~~"] = self.b
            self.classifier_information["~~w0~~"] = self.w[0]
            self.classifier_information["~~w1~~"] = self.w[1]
        except:
            pass
        self.delete_training_data()
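
To make the option assembly above concrete, a standalone sketch of the string it would produce for one representative configuration (the values are illustrative, not defaults taken from the node):

# Illustrative settings only: linear kernel, C-SVC, two class weights.
complexity, exponent, gamma, offset = 1.0, 2, 0.1, 0.0
nu, epsilon, tolerance = 0.5, 0.1, 0.001
weight = [1.0, 2.0]

options = "-c %.42f -d %d -g %.42f -r %.42f -n %.42f -p %.42f -e %.20f -m %.42f" % (
    complexity, exponent, gamma, offset, nu, epsilon, tolerance, 1000)
for i, w in enumerate(weight):
    options += " -w%d %.42f" % (i, w)   # one '-w<label> <weight>' pair per class
options += " -t 0"   # linear kernel
options += " -s 0"   # C-SVC
options += " -q"     # quiet
print(options)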
Example No. 59
x_train, x_test = x[:3451], x[3451:]

# print(x_train)
# print(y_train)

param = svmutil.csr_find_scale_param(x_train, lower=0)
x_train = svmutil.csr_scale(x_train, param)

x_test = svmutil.csr_scale(x_test, param)

best_err_cross = 1.
best_params_cross = None
# best_acc_predict = 0
# best_params_predict = None

prob = svmutil.svm_problem(y_train, x_train)

K = 8
error_values = {}

with open("results\\10_fold_valid.txt", "w") as out_res:
    for d in [1, 2, 3, 4]:
        # for d in [1, 2]:
        error_values[d] = {}

        # for k in range(4, K+1):
        for k in range(-K, K + 1):
            print(f"K = {k}, d = {d}")
            out_res.write(f"K = {k}, d = {d}")

            params = f'-t 1 -d {d} -c {2**k} -r 1 -q'
Example No. 60
    tsx = train_set_x.get_value()
    svm_inputs = [list(tsx[s, :]) for s in range(tsx.shape[0])]

    # use targets to train one svm for each hidden neuron
    print "Training SVMs..."
    probs = []
    params = []
    svms = []
    ws = []
    bs = []
    werrs = 0
    for n in range(hl.n_out):
        print "Hidden neuron: %d" % n,
        print " Problem...",
        if n == 0:
            probs.append(svmutil.svm_problem(svm_targets[n], svm_inputs))
        else:
            probs.append(svmutil.svm_problem(svm_targets[n], None, tmpl=probs[0]))
        params.append(svmutil.svm_parameter("-q -s 0 -t 0 -c 100"))
        print " Training...",
        svms.append(svmutil.svm_train(probs[n], params[n]))
        print " Saving...",
        svmutil.svm_save_model("hidden%04d.svm" % n, svms[n])

        print " Testing..."
        # get weights from SVM
        w, b = get_svm_weights(svms[n], hl.n_in)
        ws.append(w)
        bs.append(b)

        # test model