Example #1
    def test_manual_module(self):
        np.random.seed(42)
        torch.manual_seed(42)
        for test_num in range(10):
            # Draw a random batch size and feature count for each trial.
            n_batch = int(np.random.choice(range(32, 128)))
            n_neurons = int(np.random.choice(range(1, 10)))
            # Inputs with mean 10 and std 2, so normalization has real work to do.
            x = 2 * torch.randn(n_batch, n_neurons, requires_grad=True) + 10
            bn_manual_mod = CustomBatchNormManualModule(n_neurons)
            y_manual_mod = bn_manual_mod(x)
            # The normalized output should have per-feature mean ~0 and variance ~1;
            # the loose variance tolerance absorbs the biased/unbiased estimator gap.
            self.assertLess(np.max(np.abs(y_manual_mod.mean(dim=0).detach().numpy())), 1e-5)
            self.assertLess(np.max(np.abs(y_manual_mod.var(dim=0).detach().numpy() - 1)), 1e-1)
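Both examples use CustomBatchNormManualModule, which is not shown here. Below is a minimal sketch of what such a module might look like, assuming it normalizes each feature over the batch dimension with a learnable per-feature scale and shift; the names gamma and beta and the eps default are assumptions, and assignment versions usually also implement the backward pass by hand via a custom torch.autograd.Function, which is omitted:

import torch
import torch.nn as nn

class CustomBatchNormManualModule(nn.Module):
    """Sketch of a hand-written batch norm; parameter names are assumed."""

    def __init__(self, n_neurons, eps=1e-5):
        super().__init__()
        self.eps = eps
        self.gamma = nn.Parameter(torch.ones(n_neurons))   # per-feature scale
        self.beta = nn.Parameter(torch.zeros(n_neurons))   # per-feature shift

    def forward(self, x):
        # Biased (1/N) batch statistics per feature, as in the batch-norm paper.
        mean = x.mean(dim=0)
        var = x.var(dim=0, unbiased=False)
        x_hat = (x - mean) / torch.sqrt(var + self.eps)
        return self.gamma * x_hat + self.beta

With gamma initialized to ones and beta to zeros, a freshly constructed module returns exactly the normalized input, which is what the assertions in the test above check.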
Example #2
    def __init__(self, n_inputs, n_hidden, n_classes):
        """
        Initializes the MLP object.

        Args:
          n_inputs: number of inputs.
          n_hidden: list of ints, specifies the number of units
                    in each hidden linear layer. If the list is empty,
                    the MLP has no hidden layers and the model simply
                    performs multinomial logistic regression.
          n_classes: number of classes of the classification problem.
                     This number is required in order to specify the
                     output dimensions of the MLP.

        Implements initialization of the network.
        """

        ########################
        # PUT YOUR CODE HERE   #
        ########################
        super(MLP, self).__init__()
        self.layers = []
        in_features = n_inputs
        for out_features in n_hidden:
            linear = nn.Linear(in_features, out_features)
            # Alternatives tried here: the built-in batch norm and dropout.
            # batchnorm = nn.BatchNorm1d(out_features)
            # dropout = nn.Dropout(0.2)
            relu = nn.ReLU()
            self.layers.append(linear)
            # self.layers.append(batchnorm)
            self.layers.append(CustomBatchNormManualModule(out_features))
            # self.layers.append(dropout)
            self.layers.append(relu)

            in_features = out_features
        # dropout = nn.Dropout()
        # self.layers.append(dropout)
        # The output layer produces raw logits; the softmax stays commented out
        # because cross-entropy loss applies log-softmax internally.
        linear = nn.Linear(in_features, n_classes)
        softmax = nn.Softmax(dim=1)
        self.layers.append(linear)
        # self.layers.append(softmax)
        # nn.Sequential registers every layer as a submodule, so their
        # parameters are visible to optimizers and to .to(device).
        self.sequential = nn.Sequential(*self.layers)
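A hypothetical usage sketch follows; the dimensions and batch size are arbitrary, and it assumes a forward method that simply delegates to self.sequential, which the snippet above does not show:

import torch
import torch.nn as nn

# Assumed forward, not shown in the snippet above:
#     def forward(self, x):
#         return self.sequential(x)

mlp = MLP(n_inputs=784, n_hidden=[128, 64], n_classes=10)
x = torch.randn(32, 784)                  # batch of 32 flattened inputs
logits = mlp(x)                           # raw class scores, shape (32, 10)
targets = torch.randint(0, 10, (32,))     # dummy integer labels
loss = nn.CrossEntropyLoss()(logits, targets)
loss.backward()                           # gradients flow through the custom batch norm

# With an empty n_hidden the model reduces to multinomial logistic
# regression: a single Linear(784, 10) layer.
logreg = MLP(n_inputs=784, n_hidden=[], n_classes=10)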