Example #1
    @classmethod
    def setUpClass(cls):
        master_seed(seed=1234)
        super().setUpClass()

        model = xgb.XGBClassifier(n_estimators=30, max_depth=5)
        model.fit(cls.x_train_iris, np.argmax(cls.y_train_iris, axis=1))

        cls.classifier = XGBoostClassifier(model=model)
Example #2
    def test_XGBoost(self):
        model = XGBClassifier(n_estimators=4, max_depth=6)
        model.fit(self.x_train, np.argmax(self.y_train, axis=1))

        classifier = XGBoostClassifier(model=model, nb_features=self.n_features, nb_classes=self.n_classes)

        rt = RobustnessVerificationTreeModelsCliqueMethod(classifier=classifier)
        average_bound, verified_error = rt.verify(x=self.x_test, y=self.y_test, eps_init=0.3,
                                                  nb_search_steps=10, max_clique=2, max_level=2)

        self.assertEqual(average_bound, 0.03186914062500001)
        self.assertEqual(verified_error, 0.99)
Example #3
    @classmethod
    def setUpClass(cls):
        np.random.seed(seed=1234)
        (x_train, y_train), (x_test, y_test), _, _ = load_dataset('iris')

        cls.x_train = x_train
        cls.y_train = y_train
        cls.x_test = x_test
        cls.y_test = np.argmax(y_test, axis=1)

        model = xgb.XGBClassifier(n_estimators=30, max_depth=5)
        model.fit(x_train, np.argmax(y_train, axis=1))

        cls.classifier = XGBoostClassifier(model=model)
Example #4
    @classmethod
    def setUpClass(cls):
        master_seed(seed=1234)
        super().setUpClass()

        num_round = 10
        param = {
            "objective": "multi:softmax",
            "metric": "multi_logloss",
            "num_class": 3
        }
        train_data = xgb.DMatrix(cls.x_train_iris, label=cls.y_train_iris)
        eval_list = [(train_data, "train")]
        model = xgb.train(param, train_data, num_round, eval_list)

        cls.classifier = XGBoostClassifier(model=model, nb_classes=3)
Example #5
    @classmethod
    def setUpClass(cls):
        np.random.seed(seed=1234)
        (x_train, y_train), (x_test, y_test), _, _ = load_dataset('iris')

        cls.x_train = x_train
        cls.y_train = y_train
        cls.x_test = x_test
        cls.y_test = np.argmax(y_test, axis=1)

        num_round = 10
        param = {'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3}
        train_data = xgb.DMatrix(cls.x_train, label=cls.y_train)
        evallist = [(train_data, 'train')]
        model = xgb.train(param, train_data, num_round, evallist)

        cls.classifier = XGBoostClassifier(model=model, nb_classes=3)
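    # Not part of the original example: a minimal sketch of a test using this
    # fixture. XGBoostClassifier.predict returns per-class scores, so argmax
    # recovers labels comparable to cls.y_test; the 0.8 accuracy threshold is an
    # illustrative assumption.
    def test_predict(self):
        predictions = self.classifier.predict(self.x_test)
        accuracy = np.sum(np.argmax(predictions, axis=1) == self.y_test) / len(self.y_test)
        self.assertGreater(accuracy, 0.8)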
Example #6
    parser.add_argument('n', type=int, help='number of features')
    parser.add_argument('--feature_start', type=int, default=0, choices=[0, 1], help='index at which feature numbering starts; for cod-rna and higgs this should be 0.')

    args = parser.parse_args()
    d_name = args.data_name
    m_name = args.model_name
    data_path = args.testset
    model_path = args.m
    nclasses = args.c
    nfeatures = args.n
    f_start = args.feature_start
    
    model = xgb.Booster()
    model.load_model(model_path)
    
    classifier = XGBoostClassifier(model=model, clip_values=(0, 1), nb_features=nfeatures, nb_classes=nclasses)
    
    test_data, test_label = load_svmlight_file(data_path, n_features=nfeatures)
    test_data = test_data.toarray()
    test_label = test_label.astype('int')
    n = len(test_label)
    df = pd.DataFrame(test_data)
    df['label'] = test_label
    df = df.sample(frac=1)
    test_label = df['label'].tolist()
    test_data = np.array(df.drop(columns=['label']))   
    
    predictions = np.argmax(classifier.predict(test_data), axis=1)
        
    attack = HopSkipJump(classifier=classifier, norm=np.inf)
    n_selected = 100
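    # The snippet is cut off above; what follows is a minimal sketch (not from the
    # original source) of running HopSkipJump on the first n_selected correctly
    # classified samples. The reported statistics are illustrative choices.
    correct_idx = np.where(predictions == np.array(test_label))[0][:n_selected]
    x_selected = test_data[correct_idx]

    x_adv = attack.generate(x=x_selected)
    adv_predictions = np.argmax(classifier.predict(x_adv), axis=1)

    success_rate = np.mean(adv_predictions != predictions[correct_idx])
    linf_perturbation = np.max(np.abs(x_adv - x_selected), axis=1)
    print('Attack success rate:', success_rate)
    print('Median L-inf perturbation:', np.median(linf_perturbation))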
x_train = x_train.reshape((n_train, n_features))
x_test = x_test.reshape((n_test, n_features))

x_train = x_train[:NB_TRAIN]
y_train = y_train[:NB_TRAIN]
x_test = x_test[:NB_TEST]
y_test = y_test[:NB_TEST]

master_seed(42)

# 1. XGBoost

model = XGBClassifier(n_estimators=4, max_depth=6)
model.fit(x_train, np.argmax(y_train, axis=1))

classifier = XGBoostClassifier(model=model, nb_features=n_features, nb_classes=n_classes)

rt = RobustnessVerificationTreeModelsCliqueMethod(classifier=classifier)
average_bound, verified_error = rt.verify(x=x_test, y=y_test, eps_init=0.3, nb_search_steps=10, max_clique=2,
                                          max_level=2)

print('Average bound:', average_bound)
print('Verified error at eps:', verified_error)
"""
Average bound: 0.035996093750000006
Verified error at eps: 0.96
"""

# 2. LightGBM

train_data = lightgbm.Dataset(x_train, label=np.argmax(y_train, axis=1))
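# The LightGBM branch is cut off here; a minimal sketch of how it could continue,
# assuming ART's LightGBMClassifier wrapper is available and using illustrative
# training parameters.
params = {'objective': 'multiclass', 'metric': 'multi_logloss', 'num_class': n_classes}
model = lightgbm.train(params, train_data, num_boost_round=4)

classifier = LightGBMClassifier(model=model)

rt = RobustnessVerificationTreeModelsCliqueMethod(classifier=classifier)
average_bound, verified_error = rt.verify(x=x_test, y=y_test, eps_init=0.3, nb_search_steps=10, max_clique=2,
                                          max_level=2)

print('Average bound:', average_bound)
print('Verified error at eps:', verified_error)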
nb_samples_train = x_train.shape[0]
nb_samples_test = x_test.shape[0]
x_train = x_train.reshape((nb_samples_train, 28 * 28))
x_test = x_test.reshape((nb_samples_test, 28 * 28))

# Step 2: Create the model

params = {"objective": "multi:softprob", "metric": "accuracy", "num_class": 10}
dtrain = xgb.DMatrix(x_train, label=np.argmax(y_train, axis=1))
dtest = xgb.DMatrix(x_test, label=np.argmax(y_test, axis=1))
evals = [(dtest, "test"), (dtrain, "train")]
model = xgb.train(params=params, dtrain=dtrain, num_boost_round=2, evals=evals)

# Step 3: Create the ART classifier

classifier = XGBoostClassifier(
    model=model, clip_values=(min_pixel_value, max_pixel_value), nb_features=28 * 28, nb_classes=10
)

# Step 4: Train the ART classifier

# The model has already been trained in step 2

# Step 5: Evaluate the ART classifier on benign test examples

predictions = classifier.predict(x_test)
accuracy = np.sum(np.argmax(predictions, axis=1) == np.argmax(y_test, axis=1)) / len(y_test)
print("Accuracy on benign test examples: {}%".format(accuracy * 100))

# Step 6: Generate adversarial test examples
attack = ZooAttack(
    classifier=classifier,
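    # The argument list is cut off in this snippet; the values below are
    # illustrative assumptions, not the original settings.
    targeted=False,
    max_iter=20,
    use_resize=False,
    use_importance=False,
    nb_parallel=1,
    batch_size=1,
)
x_test_adv = attack.generate(x=x_test)

# Evaluate on the adversarial test examples (sketch, not part of the original snippet)
predictions_adv = classifier.predict(x_test_adv)
accuracy_adv = np.sum(np.argmax(predictions_adv, axis=1) == np.argmax(y_test, axis=1)) / len(y_test)
print("Accuracy on adversarial test examples: {}%".format(accuracy_adv * 100))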