Example #1
def substitute_model(img_rows=28, img_cols=28, nb_classes=10):
    """
    Defines the model architecture to be used by the substitute. Use
    the example model interface.
    :param img_rows: number of rows in input
    :param img_cols: number of columns in input
    :param nb_classes: number of classes in output
    :return: tensorflow model
    """
    input_shape = (None, img_rows, img_cols, 1)

    # Define a convolutional model (it is different from the black-box)
    layers = [
        Conv2D(64, (8, 8), (2, 2), "SAME"),
        ReLU(),
        Flatten(),
        Linear(200),
        ReLU(),
        Linear(100),
        ReLU(),
        Linear(nb_classes),
        Softmax()
    ]
    return MLP(layers, input_shape)
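A minimal usage sketch for the substitute above. It assumes the layer classes and MLP come from CleverHans v2's cleverhans_tutorials.tutorial_models and that the returned object exposes the cleverhans.model.Model interface (get_probs); none of these imports appear in the original snippet.

import numpy as np
import tensorflow as tf

# Hypothetical driver code: get_probs is assumed from the CleverHans example
# model interface mentioned in the docstring.
x = tf.placeholder(tf.float32, shape=(None, 28, 28, 1))   # MNIST-shaped batch
model = substitute_model(img_rows=28, img_cols=28, nb_classes=10)
preds = model.get_probs(x)                                 # (None, 10) class probabilities

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    probs = sess.run(preds, feed_dict={x: np.zeros((1, 28, 28, 1), np.float32)})
    print(probs.shape)                                     # (1, 10)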
Example #2
def abalone_mlp(nb_classes=2, input_shape=None):
    """Small sigmoid MLP for the Abalone dataset: two 10-unit hidden layers."""
    layers = [Flatten(),
              Linear(10),
              Sigmoid(),
              Linear(10),
              Sigmoid(),
              Flatten(),  # no-op here: the activations are already flat
              Linear(nb_classes),
              Softmax()
             ]
    model = MLP(layers, input_shape)
    return model
Example #3
def fashion_net(n_classes):
    # im_size and im_chan: assumed Fashion-MNIST defaults (not defined in this snippet).
    im_size, im_chan = 28, 1
    input_shape = (None, im_size, im_size, im_chan)
    layers = [
        Conv2D(64, (8, 8), (2, 2), "SAME"),
        ReLU(),
        Conv2D(64 * 2, (6, 6), (2, 2), "VALID"),
        ReLU(),
        Conv2D(64 * 2, (5, 5), (1, 1), "VALID"),
        ReLU(),
        Flatten(),
        Linear(n_classes),
        Softmax()
    ]

    model = MLP(layers, input_shape)
    return model
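A quick sanity check of the tensor sizes this stack produces, assuming im_size = 28 and im_chan = 1 (Fashion-MNIST defaults; the snippet itself does not define them). TensorFlow's output-size rules are ceil(n / s) for SAME padding and floor((n - k) / s) + 1 for VALID.

import math

def conv_out(n, k, s, padding):
    # Per-dimension output size under TensorFlow's padding rules.
    if padding == "SAME":
        return math.ceil(n / s)
    return (n - k) // s + 1        # VALID

n = 28                             # assumed im_size
n = conv_out(n, 8, 2, "SAME")      # 14  (64 channels)
n = conv_out(n, 6, 2, "VALID")     # 5   (128 channels)
n = conv_out(n, 5, 1, "VALID")     # 1   (128 channels)
print(n * n * 128)                 # 128 features reach Linear(n_classes)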
Example #4
def make_madry_ngpu(nb_classes=10, input_shape=(None, 28, 28, 1), **kwargs):
    """
    Create a multi-GPU model similar to Madry et al. (arXiv:1706.06083).
    """
    layers = [Conv2DnGPU(32, (5, 5), (1, 1), "SAME"),
              ReLU(),
              MaxPool((2, 2), (2, 2), "SAME"),
              Conv2DnGPU(64, (5, 5), (1, 1), "SAME"),
              ReLU(),
              MaxPool((2, 2), (2, 2), "SAME"),
              Flatten(),
              LinearnGPU(1024),
              ReLU(),
              LinearnGPU(nb_classes),
              Softmax()]

    model = MLPnGPU(layers, input_shape)
    return model
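With the default (None, 28, 28, 1) input, the two SAME max-pools halve the spatial size from 28 to 14 to 7, so Flatten() hands 7 * 7 * 64 = 3136 features to LinearnGPU(1024). The rough parameter tally below is back-of-the-envelope arithmetic for that default shape, not something the snippet computes.

# Approximate trainable-parameter count for the default MNIST input shape.
conv1 = 5 * 5 * 1 * 32 + 32        # 832
conv2 = 5 * 5 * 32 * 64 + 64       # 51,264
fc1 = 7 * 7 * 64 * 1024 + 1024     # 3,212,288
fc2 = 1024 * 10 + 10               # 10,250
print(conv1 + conv2 + fc1 + fc2)   # 3,274,634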
Example #5
def PAP_substitute_model(img_rows=1, img_cols=2, nb_classes=2):
    """
    Defines the model architecture to be used by the substitute. Use
    the example model interface.
    :param img_rows: number of rows in input
    :param img_cols: number of columns in input
    :param nb_classes: number of classes in output
    :return: tensorflow model
    """
    input_shape = (None, img_rows, img_cols, 1)  # layout taken from Papernot's code; kept as-is so as not to break anything
    # Define a fully connected model (it is different from the black-box)
    layers = [Flatten(),
              Linear(200),
              ReLU(),
              Linear(200),
              ReLU(),
              Linear(nb_classes),
              Softmax()]
    #layers = [Flatten(), Linear(nb_classes), Softmax()]  # simplified surrogate
    return MLP(layers, input_shape)
Example #6
def make_imagenet_cnn(input_shape=(None, 224, 224, 3)):
    """Convolutional model with AlexNet-style filter counts for 224x224x3 inputs."""
    layers = [
        Conv2D(96, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(256, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(384, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(384, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Conv2D(256, (3, 3), (2, 2), "VALID"),
        ReLU(),
        Flatten(),
        Linear(4096),
        ReLU(),
        Linear(4096),
        ReLU(),
        Linear(1000),
        Softmax()
    ]

    model = MLP(layers, input_shape)
    return model
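A short trace of how the five VALID, stride-2 convolutions shrink the assumed 224x224 input (output side = floor((n - 3) / 2) + 1 at each step); the feature map reaching Flatten() is 6x6x256 = 9216 values, which then feed Linear(4096).

n = 224
for _ in range(5):                 # five 3x3, stride-2, VALID convolutions
    n = (n - 3) // 2 + 1
    print(n)                       # 111, 55, 27, 13, 6
print(n * n * 256)                 # 9216 features enter Linear(4096)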