Example #1
# Assumed context: `keras` and `from foolbox import zoo` are imported, and
# `NormalizingLayer01` is a custom layer defined elsewhere in the project.
def load_model(self, model_name, dataset_shortname='mnist'):
    # For the keras.applications branches below, `dataset_shortname` is passed
    # as `weights`, which Keras expects to be 'imagenet', None, or a path to a
    # weights file.
    if model_name == 'inception':
        self.model = keras.applications.inception_v3.InceptionV3(
            weights=dataset_shortname)
        self.framework = 'keras'
    elif model_name == 'resnet':
        self.model = keras.applications.resnet50.ResNet50(
            weights=dataset_shortname)
        self.framework = 'keras'
    elif model_name == 'vgg':
        self.model = keras.applications.vgg16.VGG16(
            weights=dataset_shortname)
        self.framework = 'keras'
    elif model_name == 'abs':
        # Analysis-by-Synthesis model fetched from the Foolbox model zoo
        url = 'https://github.com/bethgelab/AnalysisBySynthesis'
        self.model = zoo.get_model(url)
        self.framework = 'pytorch'
    elif model_name.endswith(".h5"):
        # Custom Keras model loaded from disk
        self.framework = 'keras'
        if "DenseNet_k60_L16_norm" in model_name:
            self.model = keras.models.load_model(
                model_name,
                compile=False,
                custom_objects={'NormalizingLayer01': NormalizingLayer01})
        else:
            self.model = keras.models.load_model(model_name, compile=False)
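
A minimal usage sketch for the loader above; the `ModelWrapper` class name and the `.h5` path are hypothetical, since the original snippet only shows the method body:

# Hypothetical usage of load_model; ModelWrapper and the .h5 path are placeholders.
wrapper = ModelWrapper()
wrapper.load_model('resnet', dataset_shortname='imagenet')  # Keras ResNet50 with ImageNet weights
wrapper.load_model('abs')                                    # PyTorch model fetched via foolbox.zoo
wrapper.load_model('saved_models/custom.h5')                 # custom Keras model loaded from disk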
Example #2
# Imports required by this snippet (omitted in the original excerpt)
import numpy as np

import foolbox
from foolbox import zoo


def test_loading_model(url, dim):
    # download model
    model = zoo.get_model(url)

    # create a dummy image
    x = np.zeros(dim, dtype=np.float32)
    x[:] = np.random.randn(*x.shape)

    # run the model
    logits = model.predictions(x)
    probabilities = foolbox.utils.softmax(logits)
    predicted_class = np.argmax(logits)

    # sanity check
    assert predicted_class >= 0
    assert np.sum(probabilities) >= 0.9999
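
A possible invocation of the test above; the model URL is the one used elsewhere on this page, and the (32, 32, 3) input shape is an assumption about what that CIFAR-10 model expects:

# Hedged usage sketch: the URL is reused from Example #4 and the input shape is assumed.
test_loading_model("https://github.com/bethgelab/cifar10_challenge.git", (32, 32, 3))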
Example #3
# Imports required by this example (omitted in the original excerpt)
import logging
from io import BytesIO
from json import loads

import numpy as np
import requests
from kafka import KafkaConsumer
from PIL import Image

import foolbox
from foolbox import zoo


def main(args):
    logging.info('model={}'.format(args.model))
    model = zoo.get_model(url=args.model)
    logging.info('finished acquiring model')

    logging.info('creating attack {}'.format(args.attack))
    attack = foolbox.attacks.FGSM(model)
    logging.info('finished creating attack')

    logging.info('brokers={}'.format(args.brokers))
    logging.info('topic={}'.format(args.topic))
    logging.info('creating kafka consumer')
    consumer = KafkaConsumer(
        args.topic,
        bootstrap_servers=args.brokers,
        value_deserializer=lambda val: loads(val.decode('utf-8')))
    logging.info('finished creating kafka consumer')

    while True:
        for message in consumer:
            image_uri = message.value['url']
            label = message.value['label']
            logging.info('received URI {}'.format(image_uri))
            logging.info('received label {}'.format(label))
            logging.info('downloading image')
            response = requests.get(image_uri)
            img = Image.open(BytesIO(response.content))
            # PIL's img.size is (width, height); np.asarray yields the expected
            # (height, width, channels) layout directly.
            image = np.asarray(img, dtype=np.float32)
            logging.info('downloaded image')
            images = np.zeros((2, 32, 32, 3), dtype=np.float32)
            images[0] = image
            adversarial = attack(image, label)
            if adversarial is None:
                # Foolbox returns None when no adversarial example is found
                logging.info('attack failed to find an adversarial example')
                continue
            images[1] = adversarial
            logging.info('adversarial image generated')
            # preds = model.forward(images)  # Foolbox v2.0 API
            preds = model.batch_predictions(images)  # Foolbox v1.x API
            orig_inf = np.argmax(preds[0])
            adv_inf = np.argmax(preds[1])
            logging.info(
                'original inference: {}  adversarial inference: {}'.format(
                    orig_inf, adv_inf))
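
Example #3 reads `args.model`, `args.attack`, `args.brokers`, and `args.topic` without showing where they come from; a minimal argparse sketch of the assumed driver (argument names mirror those attributes, but the parser itself is a guess, not part of the original):

# Hypothetical driver for Example #3; the parser is an assumption.
import argparse
import logging

def parse_args():
    parser = argparse.ArgumentParser(description='run a Foolbox zoo model against a Kafka stream')
    parser.add_argument('--model', help='git URL of the Foolbox zoo model')
    parser.add_argument('--attack', default='FGSM', help='attack name (the example hard-codes FGSM)')
    parser.add_argument('--brokers', default='localhost:9092', help='Kafka bootstrap servers')
    parser.add_argument('--topic', default='images', help='Kafka topic to consume')
    return parser.parse_args()

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    main(parse_args())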
Example #4
# If you want to run it on Google Colab
# !pip install https://github.com/bethgelab/foolbox/archive/master.zip
# !pip install randomgen

from foolbox import zoo
url = "https://github.com/bethgelab/cifar10_challenge.git"
model = zoo.get_model(url)

import numpy as np
import keras
import foolbox
import randomgen
from keras.datasets import cifar10
from sklearn.decomposition import PCA

''' Loading cifar-10 dataset'''

(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
y_train = np.reshape(y_train, (50000,))
y_test = np.reshape(y_test, (10000,))


''' Performing PCA on training data to get all principal components'''

data = np.reshape(x_train, (50000, 3072))
pca = PCA(n_components=3072).fit(data)

''' Reading input images'''
from sklearn.decomposition import PCA
from sklearn.utils import shuffle
from matplotlib import pyplot as plt
from tqdm import tqdm
import time
import foolbox
from keras import backend
from keras.models import load_model
from keras.datasets import mnist
from keras.utils import np_utils


## This model is for CIFAR-10 only; for other datasets, replace this code -------------
from foolbox import zoo
url = "https://github.com/bethgelab/cifar10_challenge.git"
dNet = zoo.get_model(url)

# For loading some other Keras model instead (note: this overwrites the zoo model above)
backend.set_learning_phase(False)
model = keras.models.load_model('/address/to/your/model.h5')
fmodel = foolbox.models.KerasModel(model, bounds=(0, 1))
dNet = fmodel


# Loading CIFAR-10 dataset -----------------------------------

(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
y_train = np.reshape(y_train, (50000,))
y_test = np.reshape(y_test, (10000,))
Example #6
def load_foolbox_zoo_model(git_url):
    # https://foolbox.readthedocs.io/en/latest/modules/zoo.html
    # GIT_EXTENSION is assumed to be a module-level constant such as ".git"
    assert git_url.endswith(GIT_EXTENSION)
    fmodel = zoo.get_model(git_url)

    return fmodel
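
A hedged usage sketch for Example #6; the constant's value and the model URL are assumptions (the URL is the one used elsewhere on this page), and `from foolbox import zoo` is assumed to be imported:

# Hypothetical usage; the GIT_EXTENSION value and the URL are assumptions.
GIT_EXTENSION = ".git"
fmodel = load_foolbox_zoo_model("https://github.com/bethgelab/cifar10_challenge.git")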