Example #1
    def test_SVM(self):
        # Train on the first 70% of rows, predict on the remaining 30%.
        n = int(0.7 * self.data.X.shape[0])
        learn = svm.SVMLearner()
        clf = learn(self.data[:n])
        z = clf(self.data[n:])
        # Expect more than 70% of the held-out predictions to match the true labels.
        self.assertTrue(
            np.sum(z.reshape((-1, 1)) == self.data.Y[n:]) > 0.7 * len(z))
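The same hold-out check can be expressed directly against scikit-learn, which recent Orange versions wrap; the sketch below is only illustrative and assumes plain numpy arrays X and y.

from sklearn.svm import SVC

def holdout_accuracy(X, y, train_fraction=0.7):
    # Train an RBF-kernel SVM on the first fraction of rows and
    # return mean accuracy on the remaining held-out rows.
    n = int(train_fraction * X.shape[0])
    clf = SVC().fit(X[:n], y[:n])
    return clf.score(X[n:], y[n:])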
Example #2
def main():
    import sip
    from PyQt4.QtGui import QApplication

    import Orange
    from Orange.classification import logistic_regression, svm
    from Orange.evaluation import testing

    app = QApplication([])
    w = OWLiftCurve()
    w.show()
    w.raise_()

    data = Orange.data.Table("ionosphere")
    results = testing.CrossValidation(
        data,
        [logistic_regression.LogisticRegressionLearner(penalty="l2"),
         logistic_regression.LogisticRegressionLearner(penalty="l1"),
         svm.SVMLearner(probability=True),
         svm.NuSVMLearner(probability=True)
         ],
        store_data=True
    )
    results.fitter_names = ["LR l2", "LR l1", "SVM", "Nu SVM"]
    w.set_results(results)
    rval = app.exec_()

    sip.delete(w)
    del w
    app.processEvents()
    del app
    return rval
Example #3
    def apply(self):
        kernel = ["linear", "poly", "rbf", "sigmoid"][self.kernel_type]
        common_args = dict(kernel=kernel,
                           degree=self.degree,
                           gamma=self.gamma,
                           coef0=self.coef0,
                           tol=self.tol,
                           probability=True,
                           preprocessors=self.preprocessors)
        if self.svmtype == 0:
            learner = svm.SVMLearner(C=self.C, **common_args)
        else:
            learner = svm.NuSVMLearner(nu=self.nu, **common_args)
        learner.name = self.learner_name

        classifier = None
        sv = None
        if self.data is not None:
            classifier = learner(self.data)
            classifier.name = self.learner_name
            sv = self.data[classifier.skl_model.support_]

        self.send("Learner", learner)
        self.send("Classifier", classifier)
        self.send("Support vectors", sv)
Example #4
    def applySettings(self):
        if self.useNu:
            svm_type = svm.SVMLearner.Nu_SVR
            C = self.C_nu
        else:
            svm_type = svm.SVMLearner.Epsilon_SVR
            C = self.C_epsilon

        learner = svm.SVMLearner(svm_type=svm_type,
                                 C=C,
                                 p=self.p,
                                 nu=self.nu,
                                 kernel_type=self.kernel_type,
                                 gamma=self.gamma,
                                 degree=self.degree,
                                 coef0=self.coef0,
                                 eps=self.eps,
                                 probability=self.probability,
                                 normalization=self.normalization,
                                 name=self.name)
        predictor = None
        support_vectors = None
        if self.preprocessor:
            learner = self.preprocessor.wrapLearner(learner)

        if self.data is not None:
            predictor = learner(self.data)
            support_vectors = predictor.support_vectors
            predictor.name = self.name

        self.send("Learner", learner)
        self.send("Predictor", predictor)
        self.send("Support Vectors", support_vectors)
Example #5
def main():
    import gc
    import sip
    from PyQt4.QtGui import QApplication

    import Orange
    from Orange.classification import logistic_regression, svm

    app = QApplication([])
    w = OWROCAnalysis()
    w.show()
    w.raise_()

#     data = Orange.data.Table("iris")
    data = Orange.data.Table("ionosphere")
    results = Orange.evaluation.testing.CrossValidation(
        data,
        [logistic_regression.LogisticRegressionLearner(),
         logistic_regression.LogisticRegressionLearner(penalty="l1"),
         svm.SVMLearner(probability=True),
         svm.NuSVMLearner(probability=True)],
        k=5,
        store_data=True,
    )
    results.fitter_names = ["Logistic", "Logistic (L1 reg.)", "SVM", "NuSVM"]
    w.set_results(results)

    rval = app.exec_()
    w.deleteLater()
    sip.delete(w)
    del w
    app.processEvents()
    sip.delete(app)
    del app
    gc.collect()
    return rval
Example #6
File: svm.py Project: pdphuong/scorpion
    def learn(self,
              data,
              nu=0.01,
              kernel=svm.kernels.RBF,
              svmtype=svm.SVMLearner.OneClass):
        # Fit scikit-learn's one-class SVM on a random subsample of the data
        # (sksvm and random are imported at module level in the original file).
        learner = sksvm.OneClassSVM(kernel='rbf', nu=0.01)
        sample = random.sample(data, 8000)
        clf = learner.fit(sample)
        return clf

        # Unreachable: the Orange-based variant was left in below the return.
        learner = svm.SVMLearner(svm_type=svmtype, kernel_type=kernel, nu=nu)
        classifier = learner(data)
        return classifier
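For context, a minimal self-contained sketch of how such a fitted one-class model flags outliers with scikit-learn alone; the data below is synthetic and only for illustration.

import numpy as np
from sklearn.svm import OneClassSVM

rng = np.random.RandomState(0)
X = rng.normal(size=(500, 4))                  # illustrative data, not from the project
clf = OneClassSVM(kernel="rbf", nu=0.01).fit(X)
labels = clf.predict(X)                        # +1 for inliers, -1 for flagged outliers
print("flagged outliers:", int((labels == -1).sum()))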
Example #7
from Orange import data
from Orange.classification import svm

brown = data.Table("brown-selected")
classifier = svm.SVMLearner(brown,
                            kernel_type=svm.kernels.Linear,
                            normalization=False)

weights = svm.get_linear_svm_weights(classifier)
print(sorted("%.10f" % w for w in weights.values()))

import pylab as plt
plt.hist(weights.values())
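A comparable way to inspect linear SVM weights with scikit-learn is the fitted model's coef_ attribute; the dataset below is a stand-in chosen only for illustration.

from sklearn.datasets import load_iris
from sklearn.svm import LinearSVC

X, y = load_iris(return_X_y=True)              # stand-in dataset for illustration
clf = LinearSVC(dual=False).fit(X, y)
# One weight vector per class (one-vs-rest); list each, formatted like above.
for row in clf.coef_:
    print(sorted("%.10f" % w for w in row))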
Example #8
from Orange import data
from Orange.classification import svm

vehicle = data.Table("vehicle.tab")

svm_easy = svm.SVMLearnerEasy(name="svm easy", folds=3)
svm_normal = svm.SVMLearner(name="svm")
learners = [svm_easy, svm_normal]

from Orange.evaluation import testing, scoring

results = testing.cross_validation(learners, vehicle, folds=5)
print "Name     CA        AUC"
for learner, CA, AUC in zip(learners, scoring.CA(results),
                            scoring.AUC(results)):
    print "%-8s %.2f      %.2f" % (learner.name, CA, AUC)
Example #9

def predict_discrete(predictor, data):
    # For classification, ask the model for both values and class probabilities.
    return predictor(data, Model.ValueProbs)


def predict_continuous(predictor, data):
    # For regression, only predicted values are available; pad probabilities with None.
    values = predictor(data, Model.Value)
    return values, [None] * len(data)


def is_discrete(var):
    return isinstance(var, Orange.data.DiscreteVariable)


if __name__ == "__main__":
    import Orange
    import Orange.classification.svm as svm
    import Orange.classification.logistic_regression as lr
    from PyQt4 import QtGui

    app = QtGui.QApplication([])
    w = OWPredictions()
    data = Orange.data.Table("iris")
    svm_clf = svm.SVMLearner(probability=True)(data)
    lr_clf = lr.LogisticRegressionLearner()(data)
    w.setData(data)
    w.setPredictor(svm_clf, 0)
    w.setPredictor(lr_clf, 1)
    w.handleNewSignals()
    w.show()
    app.exec_()
    w.saveSettings()
Example #10
gen = input("Enter number of genuine instances: ")
length = Num * gen  # length of input matrices; Num and numV are set earlier in the original script

svmVotesFinal = np.zeros(length/numV) #create arrays for each voting block
knnVotesFinal = np.zeros(length/numV)

for i in range(Num):
    svmCnt = []
    knnCnt = []
    traFile = "C:\\Users\\Parker\\Documents\\NPS Projects\\thesis\\status\\Databases\\train\\%strain.csv" % str(i+1)
    testFile = "C:\\Users\\Parker\\Documents\\NPS Projects\\thesis\\status\\Databases\\train\\%stest.csv" % str(i+1)
    train = data.Table(traFile)
    test = data.Table(testFile)

    svmLearner = svm.SVMLearner(svm_type=svm.SVMLearner.C_SVC, kernel_type=svm.SVMLearner.RBF,
                                kernel_func=None, C=1, nu=0.5, p=0.1, gamma=0.0, degree=3, coef0=0,
                                shrinking=True, probability=True, verbose=False,
                                cache_size=200, eps=0.001, normalization=False)

    svmLearner.tune_parameters(train, parameters=["gamma","C"], folds=8)

    svmClassifier = svmLearner(train)
    knnClassifier = knn.kNNLearner(train, k=8)

    for t in test:
        svmCnt.append(svmClassifier(t))
        knnCnt.append(knnClassifier(t))

    voteIdx = 0
    imp = (length - gen)
    svmVotes = np.zeros(length/numV) #create arrays for each voting block
    knnVotes = np.zeros(length/numV)