def test_LinearSVM(self):
    # This warning is irrelevant here
    warnings.filterwarnings("ignore", ".*", ConvergenceWarning)
    learn = LinearSVMLearner()
    res = CrossValidation(self.data, [learn], k=2)
    self.assertGreater(CA(res)[0], 0.8)
    self.assertLess(CA(res)[0], 0.9)
def test_LinearSVM(self):
    n = int(0.7 * self.data.X.shape[0])
    learn = LinearSVMLearner()
    clf = learn(self.data[:n])
    z = clf(self.data[n:])
    self.assertTrue(
        np.sum(z.reshape((-1, 1)) == self.data.Y[n:]) > 0.7 * len(z))
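The test above evaluates a 70/30 holdout split inside a unittest case. A minimal standalone sketch of the same check, assuming Orange's bundled "iris" dataset; the shuffling step and the variable names are illustrative additions (iris is sorted by class, so an unshuffled tail split would test on a single class):

import numpy as np
from Orange.data import Table
from Orange.classification import LinearSVMLearner

data = Table("iris")
data = data[np.random.RandomState(0).permutation(len(data))]  # shuffle rows
n = int(0.7 * len(data))                                      # 70/30 train/test split
model = LinearSVMLearner()(data[:n])                          # fit on the first 70%
predictions = model(data[n:])                                 # predict class values for the rest
print("holdout accuracy: %.3f" % np.mean(predictions == data.Y[n:]))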
def test_reprs(self):
    lr = LogisticRegressionLearner(tol=0.0002)
    m = MajorityLearner()
    nb = NaiveBayesLearner()
    rf = RandomForestLearner(bootstrap=False, n_jobs=3)
    st = SimpleTreeLearner(seed=1, bootstrap=True)
    sm = SoftmaxRegressionLearner()
    svm = SVMLearner(shrinking=False)
    lsvm = LinearSVMLearner(tol=0.022, dual=False)
    nsvm = NuSVMLearner(tol=0.003, cache_size=190)
    osvm = OneClassSVMLearner(degree=2)
    tl = TreeLearner(max_depth=3, min_samples_split=1)
    knn = KNNLearner(n_neighbors=4)
    el = EllipticEnvelopeLearner(store_precision=False)
    srf = SimpleRandomForestLearner(n_estimators=20)
    learners = [lr, m, nb, rf, st, sm, svm, lsvm, nsvm, osvm, tl, knn,
                el, srf]
    for l in learners:
        repr_str = repr(l)
        new_l = eval(repr_str)
        self.assertEqual(repr(new_l), repr_str)
def test_LinearSVM(self):
    learn = LinearSVMLearner()
    res = CrossValidation(self.data, [learn], k=2)
    self.assertGreater(CA(res)[0], 0.8)
    self.assertLess(CA(res)[0], 0.9)
def test_LinearSVM(self):
    learn = LinearSVMLearner()
    res = Orange.evaluation.CrossValidation(self.data, [learn], k=2)
    self.assertTrue(0.8 < Orange.evaluation.CA(res)[0] < 0.9)
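The cross-validation variants above call CrossValidation directly with the data and learners. In more recent Orange releases (roughly 3.24 onward; an assumption worth checking against the installed version) the evaluation classes are instantiated first and then called, along these lines:

import Orange

data = Orange.data.Table("iris")
learner = Orange.classification.LinearSVMLearner()
cv = Orange.evaluation.CrossValidation(k=2)   # configure the sampler first
results = cv(data, [learner])                 # then run it on data and learners
print("CA: %.3f" % Orange.evaluation.CA(results)[0])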
print("##########TASK 1####################")
data_tab = Table('iris')
feature_vars = list(data_tab.domain[:-1])
class_label_var = data_tab.domain[len(data_tab.domain) - 1]
iris_domain = Domain(feature_vars, class_label_var)
data_tab = Table.from_table(domain=iris_domain, source=data_tab)
print("DOMAIN: %s \nVARIABLES: %s \nATTRIBUTES: %s \nCLASS_VAR: %s" %
      (data_tab.domain, data_tab.domain.variables,
       data_tab.domain.attributes, data_tab.domain.class_var))
print(len(data_tab))

print("###########TASK 2###################")
svm_learner = LinearSVMLearner()
# Accuracy of cross validation: 0.940
# AUC: 0.955
eval_results = CrossValidation(data_tab, [svm_learner], k=10)
print("Accuracy of cross validation: {:.3f}".format(scoring.CA(eval_results)[0]))
print("AUC: {:.3f}".format(scoring.AUC(eval_results)[0]))

print("###########TASK 3###################")
data_tab_2d = data_tab[:50, ['sepal width', 'sepal length', 'iris']]
data_tab_2d.extend(data_tab[100:, ['sepal width', 'sepal length', 'iris']])
learner = LinearSVMLearner()
results = learner(data_tab_2d)
area_x_min = np.min(data_tab_2d[:, 'sepal width']) - 0.2
area_x_max = np.max(data_tab_2d[:, 'sepal width']) + 0.2
area_y_min = np.min(data_tab_2d[:, 'sepal length']) - 0.2
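The Task 3 excerpt stops after computing the plotting bounds. A hypothetical continuation (not part of the original script) of the usual recipe: build a mesh grid over those bounds, let the fitted model classify every grid point, and draw the decision regions with matplotlib. Names such as area_y_max, xx, yy, grid_points and zz are illustrative assumptions.

import matplotlib.pyplot as plt

area_y_max = np.max(data_tab_2d[:, 'sepal length']) + 0.2
xx, yy = np.meshgrid(np.arange(area_x_min, area_x_max, 0.02),
                     np.arange(area_y_min, area_y_max, 0.02))
grid_points = np.c_[xx.ravel(), yy.ravel()]   # (sepal width, sepal length) pairs
zz = results(grid_points)                     # predicted class for each grid point
plt.contourf(xx, yy, zz.reshape(xx.shape), alpha=0.3)
plt.scatter(data_tab_2d.X[:, 0], data_tab_2d.X[:, 1],
            c=data_tab_2d.Y, edgecolors='k')
plt.xlabel('sepal width')
plt.ylabel('sepal length')
plt.show()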