Example #1
def substitute_model(img_rows=28, img_cols=28, nb_classes=10):
    """
    Defines the model architecture to be used by the substitute. Use
    the example model interface.
    :param img_rows: number of rows in input
    :param img_cols: number of columns in input
    :param nb_classes: number of classes in output
    :return: tensorflow model
    """
    input_shape = (None, img_rows, img_cols, 1)

    # Define a small convolutional substitute (it's different from the black-box)
    layers = [
        Conv2D(64, (8, 8), (2, 2), "SAME"),
        ReLU(),
        Flatten(),
        Linear(200),
        ReLU(),
        Linear(100),
        ReLU(),
        Linear(nb_classes),
        Softmax()
    ]
    return MLP(layers, input_shape)
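For orientation, a minimal usage sketch in TF1-style graph mode, assuming the CleverHans v2 tutorial_models interface (MLP subclasses cleverhans.model.Model, so it exposes get_logits / get_probs):

import tensorflow as tf
from cleverhans_tutorials.tutorial_models import MLP, Conv2D, ReLU, Flatten, Linear, Softmax

model = substitute_model()                                   # build the substitute architecture
x = tf.placeholder(tf.float32, shape=(None, 28, 28, 1))      # MNIST-sized input
logits = model.get_logits(x)                                 # pre-softmax activations
preds = model.get_probs(x)                                   # softmax output of the substitute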
def cifar_net(n_classes):

    input_shape = (None, im_size, im_size, im_chan)  # im_size / im_chan: module-level constants (presumably 32 and 3 for CIFAR)
    layers = [
        Conv2D(128, (5, 5), (1, 1), "SAME"),
        ReLU(),
        Conv2D(128, (3, 3), (1, 1), "SAME"),
        ReLU(),
        Conv2D(64, (3, 3), (2, 2), "SAME"),
        ReLU(),
        Conv2D(64, (3, 3), (1, 1), "SAME"),
        ReLU(),
        Conv2D(64, (3, 3), (1, 1), "SAME"),
        ReLU(),
        Conv2D(32, (3, 3), (2, 2), "SAME"),
        ReLU(),
        Conv2D(12, (3, 3), (1, 1), "SAME"),
        ReLU(),
        Flatten(),
        Linear(256),
        ReLU(),
        Linear(n_classes)
    ]

    model = MLP(layers, input_shape)
    return model
Example #3
def setup_simple_model_tf(model_pt, input_shape):
    from cleverhans_tutorials.tutorial_models import MLP, Linear, ReLU
    layers = [Linear(10), ReLU(), Linear(10)]
    layers[0].name = 'fc1'
    layers[1].name = 'relu'
    layers[2].name = 'fc2'
    model = MLP(layers, input_shape)
    load_weights_pt(model_pt, layers)
    return model
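load_weights_pt is not shown in the snippet. Below is a hypothetical sketch of what it could look like, assuming model_pt is a PyTorch module with submodules named fc1 / fc2, and that the tutorial_models Linear layers store their TF parameters as .W and .b (created when MLP calls set_input_shape):

import tensorflow as tf

def load_weights_pt(model_pt, layers, sess=None):
    # Hypothetical helper: copy fully connected weights from the PyTorch model
    # into the TF variables of the Linear layers defined above.
    sess = sess or tf.get_default_session()
    assigns = []
    for layer in layers:
        if not hasattr(layer, 'W'):
            continue                                         # skip activations such as ReLU
        pt_module = getattr(model_pt, layer.name)            # e.g. model_pt.fc1
        w = pt_module.weight.detach().cpu().numpy().T        # PyTorch stores (out, in); TF expects (in, out)
        b = pt_module.bias.detach().cpu().numpy()
        assigns += [tf.assign(layer.W, w), tf.assign(layer.b, b)]
    sess.run(assigns)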
def abalone_mlp(nb_classes=2, input_shape=None):
    layers = [Flatten(),
              Linear(10),
              Sigmoid(),
              Linear(10),
              Sigmoid(),
              Flatten(),
              Linear(nb_classes),
              Softmax()
             ]
    model = MLP(layers, input_shape)
    return model
def fashion_net(n_classes):

    input_shape = (None, im_size, im_size, im_chan)  # im_size / im_chan: module-level constants (presumably 28 and 1 for Fashion-MNIST)
    layers = [
        Conv2D(64, (8, 8), (2, 2), "SAME"),
        ReLU(),
        Conv2D(64 * 2, (6, 6), (2, 2), "VALID"),
        ReLU(),
        Conv2D(64 * 2, (5, 5), (1, 1), "VALID"),
        ReLU(),
        Flatten(),
        Linear(n_classes),
        Softmax()
    ]

    model = MLP(layers, input_shape)
    return model
def PAP_substitute_model(img_rows=1, img_cols=2, nb_classes=2):
    """
    Defines the model architecture to be used by the substitute. Use
    the example model interface.
    :param img_rows: number of rows in input
    :param img_cols: number of columns in input
    :param nb_classes: number of classes in output
    :return: tensorflow model
    """
    input_shape = (None, img_rows, img_cols, 1)  # shape in this form comes from Papernot's code; kept as-is to avoid breaking anything
    # Define a fully connected model (it's different than the black-box)
    layers = [Flatten(),
              Linear(200),
              ReLU(),
              Linear(200),
              ReLU(),
              Linear(nb_classes),
              Softmax()]
    # layers = [Flatten(), Linear(nb_classes), Softmax()]  # simplified surrogate
    return MLP(layers, input_shape)
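A minimal training sketch for this substitute, assuming TF1-style graph mode and that the synthetic inputs have already been labelled by the black-box oracle (Papernot-style substitute training):

import tensorflow as tf

model = PAP_substitute_model()
x = tf.placeholder(tf.float32, shape=(None, 1, 2, 1))        # matches img_rows=1, img_cols=2
y = tf.placeholder(tf.float32, shape=(None, 2))              # one-hot oracle labels
logits = model.get_logits(x)
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(labels=y, logits=logits))
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)       # run in a tf.Session over the synthetic dataset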
def substitute_model(img_rows=32, img_cols=32, nb_classes=10):
    """
    Defines the model architecture to be used by the substitute. Use
    the example model interface.
    :param img_rows: number of rows in input
    :param img_cols: number of columns in input
    :param nb_classes: number of classes in output
    :return: tensorflow model
    """
    input_shape = (None, img_rows, img_cols, 3)

    # Define a fully connected model (it's different than the black-box)
    layers = [Flatten(),
              SimpleLinear(200),
              ReLU(),
              SimpleLinear(200),
              ReLU(),
              SimpleLinear(nb_classes),
              SoftmaxT1()]

    return MLP(layers, input_shape)
Example #8
def make_imagenet_cnn(input_shape=(None, 224, 224, 3)):
    layers = [
        Conv2D(96, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(256, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(384, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(384, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(256, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Flatten(),
        Linear(4096),
        ReLU(),
        Linear(4096),
        ReLU(),
        Linear(1000),
        Softmax()
    ]

    model = MLP(layers, input_shape)
    return model
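As a sanity check on the architecture above (my own arithmetic, not part of the snippet): with VALID padding and stride 2, each 3x3 convolution maps a spatial size s to (s - 3) // 2 + 1, so a 224x224 input ends up as 6x6x256 = 9216 features feeding Linear(4096):

s = 224
for _ in range(5):          # five strided VALID 3x3 convolutions
    s = (s - 3) // 2 + 1    # 224 -> 111 -> 55 -> 27 -> 13 -> 6
print(s, s * s * 256)       # 6 9216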
def substitute_model(phase, img_rows=32, img_cols=32, nb_classes=10):
    """
    Defines the model architecture to be used by the substitute. Use
    the example model interface.
    :param img_rows: number of rows in input
    :param img_cols: number of columns in input
    :param nb_classes: number of classes in output
    :return: tensorflow model
    """
    input_shape = (None, img_rows, img_cols, 3)

    # Define a fully connected model (it's different than the black-box)
    '''
    layers = [Flatten(),
              SimpleLinear(200),
              ReLU(),
              SimpleLinear(200),
              ReLU(),
              SimpleLinear(nb_classes),
              SoftmaxT1()]
    '''
    nb_filters = 64
    layers = [
        Conv2D(False, nb_filters, (8, 8), (2, 2), "SAME", phase, 'conv1'),
        ReLU(),
        Conv2D(False, nb_filters * 2, (6, 6), (2, 2), "VALID", phase, 'conv2'),
        ReLU(),
        Conv2D(False, nb_filters * 2, (5, 5), (1, 1), "VALID", phase, 'conv3'),
        ReLU(),
        Flatten(),
        SimpleLinear(nb_classes),
        SoftmaxT1()
    ]

    model = MLP(layers, input_shape)
    print('Finished making basic cnn')
    return model
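Finally, a usage sketch for this variant, assuming 'phase' is a boolean training-mode placeholder consumed by the repository's custom Conv2D (e.g. for batch normalization) and that this MLP still follows the CleverHans Model interface:

import tensorflow as tf

phase = tf.placeholder(tf.bool, name='phase')                # True during training, False at test time
x = tf.placeholder(tf.float32, shape=(None, 32, 32, 3))      # CIFAR-sized input
model = substitute_model(phase)
logits = model.get_logits(x)                                 # pre-softmax output of the substitute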