Example no. 1
def make_madry_ngpu(nb_classes=10, input_shape=(None, 28, 28, 1), **kwargs):
    """
    Create a multi-GPU model similar to Madry et al. (arXiv:1706.06083).
    """
    layers = [Conv2DnGPU(32, (5, 5), (1, 1), "SAME"),
              ReLU(),
              MaxPool((2, 2), (2, 2), "SAME"),
              Conv2DnGPU(64, (5, 5), (1, 1), "SAME"),
              ReLU(),
              MaxPool((2, 2), (2, 2), "SAME"),
              Flatten(),
              LinearnGPU(1024),
              ReLU(),
              LinearnGPU(nb_classes),
              Softmax()]

    model = MLPnGPU(layers, input_shape)
    return model
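A minimal usage sketch for the factory above; the import path and the get_probs accessor are assumptions about the surrounding TF1-style code, not confirmed API.

import tensorflow as tf

# Assumption: make_madry_ngpu and the nGPU layer classes live in a local
# `model` module (hypothetical import path).
from model import make_madry_ngpu

# Build the Madry-style CNN for 10-class, 28x28x1 (MNIST-shaped) inputs.
model = make_madry_ngpu(nb_classes=10, input_shape=(None, 28, 28, 1))

# TF1-style placeholder for a batch of grayscale images.
x = tf.placeholder(tf.float32, shape=(None, 28, 28, 1))

# Assumption: the model exposes a get_probs-style forward pass returning
# class probabilities (the last layer above is a Softmax).
probs = model.get_probs(x)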
Example no. 2
 def _fully_connected(self, x, out_dim):
     """FullyConnected layer for final output."""
     if self.init_layers:
         # First pass: create the final linear (logits) layer and register it
         # in self.layers so later passes can reuse the same weights.
         fc = LinearnGPU(out_dim, w_name="DW")
         fc.name = "logits"
         self.layers += [fc]
     else:
         # Later passes: reuse the layer built on the first pass, walking
         # self.layers in order via layer_idx.
         fc = self.layers[self.layer_idx]
         self.layer_idx += 1
     # Pin the layer to the current device and propagate the training flag
     # before running the forward pass.
     fc.device_name = self.device_name
     fc.set_training(self.training)
     return fc.fprop(x)
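The snippet below is a self-contained sketch (hypothetical _Toy* names, not library code) of the build-once/reuse pattern used by _fully_connected: the first forward pass creates and registers the layer, and later passes (for example when rebuilding the graph on another device) walk self.layers instead of creating new variables.

class _ToyLayer(object):
    """Stand-in for a layer like LinearnGPU; it only records configuration."""

    def __init__(self, out_dim):
        self.out_dim = out_dim
        self.device_name = None
        self.training = None

    def set_training(self, training):
        self.training = training

    def fprop(self, x):
        # A real layer would build TF ops here; return a tag string instead.
        return "fc({}, out_dim={})".format(x, self.out_dim)


class _ToyModel(object):
    def __init__(self):
        self.layers = []
        self.init_layers = True   # True only on the first graph-building pass
        self.layer_idx = 0
        self.device_name = "/gpu:0"
        self.training = True

    def _fully_connected(self, x, out_dim):
        if self.init_layers:
            fc = _ToyLayer(out_dim)           # first pass: create and register
            self.layers += [fc]
        else:
            fc = self.layers[self.layer_idx]  # later passes: reuse in order
            self.layer_idx += 1
        fc.device_name = self.device_name
        fc.set_training(self.training)
        return fc.fprop(x)


model = _ToyModel()
print(model._fully_connected("x0", 10))  # first pass creates the layer

model.init_layers = False                # switch to reuse mode
model.layer_idx = 0
model.device_name = "/gpu:1"
print(model._fully_connected("x1", 10))  # the same layer object is reused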