示例#1
0
    def _create_clf(dnn):
        """Build a DNR classifier where every layer shares one `layer_clf`.

        An RBF-kernel SVM (gamma=1, C=1) is used both as the per-layer
        classifier and as the combiner on top of the selected DNN layers.
        """
        def _rbf_svm():
            # Fresh SVM with identical hyper-parameters on each call
            return CClassifierSVM(kernel=CKernelRBF(gamma=1), C=1)

        # Arguments: combiner, layer_clf, dnn, layer names, threshold
        return CClassifierDNR(
            _rbf_svm(), _rbf_svm(), dnn, ['conv2', 'relu'], -inf)
    def test_set(self):
        """Test setting classifier parameters before and after training.

        Checks that `set_params`/`set` propagate scalar values to every
        binary classifier of the OVA scheme, that per-classifier tuples
        are assigned index-wise after `prepare`, and that a tuple whose
        length does not match the number of binary classifiers raises
        `ValueError`.
        """
        from secml.ml.kernels import CKernelRBF
        multiclass = CClassifierMulticlassOVA(classifier=CClassifierSVM,
                                              C=1,
                                              kernel=CKernelRBF())
        # Test set before training: scalars must reach all binary clfs
        multiclass.set_params({'C': 100, 'kernel.gamma': 20})
        for clf in multiclass._binary_classifiers:
            self.assertEqual(clf.C, 100.0)
            self.assertEqual(clf.kernel.gamma, 20.0)

        # Restoring kernel
        multiclass.set('kernel', CKernelRBF(gamma=50))

        # Setting different parameter in single trained_classifiers
        multiclass.prepare(num_classes=4)
        different_c = (10, 20, 30, 40)
        multiclass.set('C', different_c)
        different_gamma = (50, 60, 70, 80)
        multiclass.set('kernel.gamma', different_gamma)

        # Fit multiclass classifier then test set after training
        multiclass.fit(self.dataset)

        # Tuple values must have been assigned index-wise
        for clf_idx, clf in enumerate(multiclass._binary_classifiers):
            self.assertEqual(clf.C, different_c[clf_idx])
            self.assertEqual(clf.kernel.gamma, different_gamma[clf_idx])

        # Test set after training
        # NOTE(review): the original repeated this assertion loop twice
        # verbatim; the copy-paste duplicate has been removed.
        multiclass.set_params({'C': 30, 'kernel.gamma': 200})
        for clf in multiclass._binary_classifiers:
            self.assertEqual(clf.C, 30.0)
            self.assertEqual(clf.kernel.gamma, 200.0)

        # Setting a parameter on a single trained classifier must not
        # propagate to the sibling classifiers
        multiclass._binary_classifiers[0].kernel.gamma = 300
        for i in range(1, multiclass.num_classifiers):
            self.assertNotEqual(multiclass._binary_classifiers[i].kernel.gamma,
                                300.0)

        # Setting different parameter in single trained_classifiers
        different_c = (100, 200, 300)

        # ValueError is raised as not enough binary classifiers are available
        with self.assertRaises(ValueError):
            multiclass.set('C', different_c)

        multiclass.prepare(num_classes=3)
        multiclass.set('C', different_c)
        for clf_idx, clf in enumerate(multiclass._binary_classifiers):
            self.assertEqual(clf.C, different_c[clf_idx])
示例#3
0
    def test_explanation_svm_rbf(self):
        """Run the simple-classifier explanation test on an RBF-kernel SVM."""
        # gamma is set once via the kernel constructor; the redundant
        # `self._clf.kernel.gamma = 0.01` re-assignment (same value) was
        # dropped, matching the other variant of this test in the suite.
        self._clf = CClassifierSVM(kernel=CKernelRBF(gamma=0.01), C=10)
        self._clf.store_dual_vars = True
        self._clf_idx = 'rbf-svm'

        self._test_explanation_simple_clf()
示例#4
0
    def setUp(self):
        """Build a synthetic multiclass 2D dataset and fit an OVA SVM.

        Populates `self.ds`, `self.kernel`, `self.normalizer`,
        `self.multiclass`, `self.y_pred` and `self.score_pred` for the
        test methods of this class.
        """

        import numpy as np
        np.random.seed(12345678)  # make any numpy-based randomness deterministic

        # generate synthetic data: 3 classes, 2 informative features
        self.ds = CDLRandom(n_classes=3,
                            n_features=2,
                            n_redundant=0,
                            n_clusters_per_class=1,
                            class_sep=1,
                            random_state=0).load()

        # Add a new class modifying one of the existing clusters:
        # samples with X[:, 0] > 0 and X[:, 1] > 1 get label num_classes,
        # turning the 3-class problem into a 4-class one
        self.ds.Y[(self.ds.X[:, 0] > 0).logical_and(
            self.ds.X[:, 1] > 1).ravel()] = self.ds.num_classes

        # self.kernel = None
        self.kernel = CKernelRBF(gamma=10)

        # Data normalization (min-max scaling of the features)
        self.normalizer = CNormalizerMinMax()
        self.ds.X = self.normalizer.fit_transform(self.ds.X)

        # One-vs-All multiclass scheme over binary SVMs
        self.multiclass = CClassifierMulticlassOVA(classifier=CClassifierSVM,
                                                   class_weight='balanced',
                                                   preprocess=None,
                                                   kernel=self.kernel)
        self.multiclass.verbose = 0  # silence classifier logging

        # Training and classification
        self.multiclass.fit(self.ds.X, self.ds.Y)

        # Cache labels and per-class scores for the tests
        self.y_pred, self.score_pred = self.multiclass.predict(
            self.ds.X, return_decision_function=True)
    def setUp(self):
        """Load the digits dataset, fit an OVA SVM and build its explainer."""

        # 100 samples, 2 classes, 20 features
        self.ds = CDLDigits().load()

        # One-vs-All SVM with a wide RBF kernel
        self.clf = CClassifierMulticlassOVA(CClassifierSVM,
                                            kernel=CKernelRBF(gamma=1e-3))

        # Training classifier
        self.clf.fit(self.ds.X, self.ds.Y)
        # Gradient*input explainer on the trained classifier
        self.explainer = CExplainerGradientInput(self.clf)
    def setUpClass(cls):
        """Load the digits dataset and fit an OVA SVM once for all tests."""

        CUnitTest.setUpClass()  # run the framework's class-level setup first

        # 100 samples, 2 classes, 20 features
        cls.ds = CDLDigits().load()

        # One-vs-All SVM with a wide RBF kernel
        cls.clf = CClassifierMulticlassOVA(CClassifierSVM,
                                           kernel=CKernelRBF(gamma=1e-3))

        # Training classifier
        cls.clf.fit(cls.ds.X, cls.ds.Y)
示例#7
0
    def setUp(self):
        """Create the dataset and a reject-threshold OVA SVM, then fit it."""

        import numpy as np
        np.random.seed(12345678)  # make any numpy-based randomness deterministic

        # Defined by the concrete test class; expected to populate self.ds
        self._dataset_creation()

        self.kernel = CKernelRBF(gamma=1)

        # One-vs-All multiclass scheme over binary SVMs
        self.multiclass = CClassifierMulticlassOVA(
            classifier=CClassifierSVM, class_weight='balanced',
            preprocess=None, kernel=self.kernel)
        self.multiclass.verbose = 0  # silence classifier logging

        # Wrap the OVA classifier with a reject option (threshold 0.6)
        self.multiclass = CClassifierRejectThreshold(self.multiclass, 0.6)

        # Training and classification
        self.multiclass.fit(self.ds.X, self.ds.Y)

        # Cache labels and per-class scores for the tests
        self.y_pred, self.score_pred = self.multiclass.predict(
            self.ds.X, return_decision_function=True)
    def test_svm(self):
        """Exercise the plotting helpers on a tiny trained RBF-SVM.

        Fits an SVM on a 4-sample toy dataset, then smoke-tests
        `plot_ds`, `plot_path` and `plot_fun` on a CFigure.
        """

        # Toy dataset: 4 samples, 2 features, binary labels
        self.X = CArray([[1, 2], [3, 4], [5, 6], [7, 8]])
        self.Y = CArray([[0], [1], [1], [0]]).ravel()
        self.dataset = CDataset(self.X, self.Y)

        self.classifier = CClassifierSVM(kernel=CKernelRBF())
        self.classifier.fit(self.dataset)

        # Plot limits: data range of each feature, padded by 1
        self.x_min, self.x_max = (self.X[:, [0]].min() - 1,
                                  self.X[:, [0]].max() + 1)
        self.y_min, self.y_max = (self.X[:, [1]].min() - 1,
                                  self.X[:, [1]].max() + 1)

        self.fig = CFigure(height=7,
                           width=10,
                           linewidth=5,
                           fontsize=24,
                           markersize=20)
        self.fig.sp.title("Svm Test")

        self.logger.info("Test plot dataset method...")

        self.fig.sp.plot_ds(self.dataset)

        self.logger.info("Test plot path method...")
        path = CArray([[1, 2], [1, 3], [1.5, 5]])
        self.fig.sp.plot_path(path)

        self.logger.info("Test plot function method...")
        bounds = [(self.x_min, self.x_max), (self.y_min, self.y_max)]
        # Plot the decision function for class y=1 over the padded bounds
        self.fig.sp.plot_fun(self.classifier.decision_function,
                             plot_levels=False,
                             grid_limits=bounds,
                             y=1)

        self.fig.sp.xlim(self.x_min, self.x_max)
        self.fig.sp.ylim(self.y_min, self.y_max)

        self.fig.show()
    def setUpClass(cls):
        """Fit an OVA SVM on a 3-blob dataset and pick an attack start point."""

        CAttackEvasionCleverhansTestCases.setUpClass()

        cls.seed = 0  # shared random state for dataset generation

        cls.y_target = None  # untargeted attack

        # One-vs-All SVM on min-max-normalized inputs
        cls.clf = CClassifierMulticlassOVA(CClassifierSVM,
                                           kernel=CKernelRBF(gamma=10),
                                           C=0.1,
                                           preprocess=CNormalizerMinMax())

        # 3 Gaussian blobs with 2D centers
        # NOTE(review): n_features=0 looks suspicious given the 2D centers;
        # presumably the feature count is inferred from `centers` — confirm
        # against CDLRandomBlobs' handling of this parameter.
        cls.ds = CDLRandomBlobs(n_features=0,
                                centers=[[0.1, 0.1], [0.5, 0], [0.8, 0.8]],
                                cluster_std=0.01,
                                n_samples=100,
                                random_state=cls.seed).load()

        cls.clf.fit(cls.ds.X, cls.ds.Y)

        # Initial attack point and its predicted label
        cls.x0 = CArray([0.6, 0.2])
        cls.y0 = CArray(cls.clf.predict(cls.x0))
示例#10
0
# secml_sklearn_clf.fit(ds_tr_secml)
# preds = secml_sklearn_clf.predict(ds_te_secml.X)
# metric = CMetricAccuracy()
# acc = metric.performance_score(y_true = ds_te_secml.Y, y_pred = preds)
# print("Accuracy on test set: {:.2%}".format(acc))
# probs = secml_sklearn_clf.predict_proba(ds_te_secml.X)       #Doesn't work
#
# #sklearn here isn't supported for performing adversarial attacks, only the native SVM of secml supports adversarial attacks
# ###############################################################
#
# =============================================================================
x, y = ds_te_secml[:, :].X, ds_te_secml[:, :].Y  # whole test set; this won't
# work if we want to specify a distinct target class for each example

#secml_clf = CClassifierMulticlassOVA(CClassifierSVM, kernel = CKernelRBF(gamma = 10), C = 1)
# Binary RBF-kernel SVM trained on the secml training set
secml_clf = CClassifierSVM(kernel=CKernelRBF(gamma=10), C=1)
secml_clf.fit(ds_tr_secml)
preds = secml_clf.predict(ds_te_secml.X)
metric = CMetricAccuracy()
acc = metric.performance_score(y_true=ds_te_secml.Y, y_pred=preds)
print("Accuracy on test set: {:.2%}".format(acc))

# Evasion-attack parameters
noise_type = 'l2'  # norm of the perturbation
dmax = 0.4  # maximum perturbation size
lb, ub = None, None  # with 0, 1 it goes out of bounds
y_target = None  # desired class for the adversarial example; None = untargeted
#solver_params = {
#    'eta': 0.3,
#    'max_iter': 100,
                           test_size=setSamplesValidationNumber,
                           random_state=random_state)

# Split off the validation portion (splitter built above)
training, validation = splitter.split(dataset)

# Normalize the data: fit min-max scaling on training data only,
# then apply the same transform to validation and test sets
normalizer = CNormalizerMinMax()
training.X = normalizer.fit_transform(training.X)
validation.X = normalizer.transform(validation.X)
test.X = normalizer.transform(test.X)

# Metric to use for training and performance evaluation
metric = CMetricAccuracy()

# Creation of the classifier (binary RBF-kernel SVM)
classifier = CClassifierSVM(kernel=CKernelRBF(gamma=10), C=1)

# We can now fit the classifier
classifier.fit(training.X, training.Y)
print("Training of classifier complete!")

# Compute predictions on a test set
predictionY = classifier.predict(test.X)

# Bounds of the attack space. Can be set to `None` for unbounded
lowerBound, upperBound = validation.X.min(), validation.X.max()

# Should be chosen depending on the optimization problem
solver_params = {
    'eta': 0.05,
    'eta_min': 0.05,
 def test_explanation_svm_rbf(self):
     """Run the simple-classifier explanation test on an RBF-kernel SVM."""
     self._clf = CClassifierSVM(kernel=CKernelRBF(gamma=0.01), C=10)
     self._clf_idx = 'rbf-svm'
     self._test_explanation_simple_clf()
te_X, te_Y = CArray(te_X), CArray(te_Y)  # wrap test arrays as secml CArray

# Build secml datasets for the train / test / cross-validation splits
ds_tr_secml = CDataset(tr_X, tr_Y)
#print(ds_tr_secml.classes, ds_tr_secml.num_classes, ds_tr_secml.num_features, ds_tr_secml.num_samples)
ds_te_secml = CDataset(te_X, te_Y)
ds_cv_secml = CDataset(cv_X, cv_Y)

# Min-max normalization: fit on training data, apply to all splits
normalizer = CNormalizerMinMax()
ds_tr_secml.X = normalizer.fit_transform(ds_tr_secml.X)
ds_te_secml.X = normalizer.transform(ds_te_secml.X)
ds_cv_secml.X = normalizer.transform(ds_cv_secml.X)

# First test sample: attack starting point and its true label
x0, y0 = ds_te_secml[0, :].X, ds_te_secml[0, :].Y

# One-vs-All multiclass SVM with RBF kernel
secml_clf = CClassifierMulticlassOVA(CClassifierSVM,
                                     kernel=CKernelRBF(gamma=10),
                                     C=1)
secml_clf.fit(ds_tr_secml)
preds = secml_clf.predict(ds_te_secml.X)
metric = CMetricAccuracy()
acc = metric.performance_score(y_true=ds_te_secml.Y, y_pred=preds)
print("Accuracy on test set: {:.2%}".format(acc))

# Evasion-attack parameters
noise_type = 'l2'  # norm of the perturbation
dmax = 1  # maximum perturbation size
lb, ub = None, None  # with 0, 1 it goes out of bounds
y_target = None  # desired class for the adversarial example; None = untargeted

# Attack starting point re-selected (same sample as above)
x0, y0 = ds_te_secml[0, :].X, ds_te_secml[0, :].Y
示例#14
0
# Hold out a validation set from the training data
# (n_tr training samples, n_val validation samples)
x_train, x_val, y, y_val = train_test_split(x_train,
                                            y,
                                            test_size=n_val,
                                            train_size=n_tr,
                                            random_state=random_state)

# Wrap each split as a secml CDataset
training_data = CDataset(x_train, y)
validation_data = CDataset(x_val, y_val)
test_data = CDataset(xtt, ytt)

# Free the original raw arrays, no longer needed
del xtr
del ytr

metric = CMetricAccuracy()

# RBF-kernel SVM with externally supplied hyper-parameters GAMMA and C
clf = CClassifierSVM(kernel=CKernelRBF(gamma=GAMMA), C=C)

# We can now fit the classifier
clf.fit(training_data.X, training_data.Y)
print("Training of classifier complete!")
# Compute predictions on a test set
y_pred = clf.predict(test_data.X)

lb, ub = validation_data.X.min(), validation_data.X.max(
)  # Bounds of the attack space. Can be set to `None` for unbounded
n_poisoning_points = int(
    n_tr * poison_percentage)  # Number of poisoning points to generate

# Should be chosen depending on the optimization problem
solver_params = {
    'eta': 0.05,