Example No. 1
def forward(self, X, mode):
    # Flatten the input data to a matrix.
    X = np.reshape(X, (self.batch_size, 784))
    # Input affine (fully-connected) layer.
    y1 = layers.affine(X, self.params['wi'], self.params['bi'])
    # ReLU activation.
    y2 = layers.relu(y1)
    # Hidden affine + ReLU layers.
    for i in range(self.num_hidden - 1):
        y2 = layers.affine(y2, self.params['w%d' % i], self.params['b%d' % i])
        y2 = layers.relu(y2)
    # Output affine layer.
    y3 = layers.affine(y2, self.params['wo'], self.params['bo'])
    return y3
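
All of the examples below are built from the same two primitives. For reference, a minimal NumPy sketch of what layers.affine and layers.relu are assumed to compute, following the standard fully-connected and rectifier semantics (the actual layers module may differ in details):

import numpy as np

def affine(X, W, b):
    # Fully-connected layer: y = X @ W + b, with X of shape (N, D),
    # W of shape (D, M) and b of shape (M,).
    return np.dot(X, W) + b

def relu(X):
    # Element-wise rectifier: max(0, x).
    return np.maximum(0, X)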
Example No. 2
def forward(self, X, mode):
    N, sequence_length, D = X.shape
    WX = self.params['WX']
    Wh = self.params['Wh']
    bias_h = self.params['bias_h']
    WY = self.params['WY']
    bias_Y = self.params['bias_Y']
    WY0 = self.params['WY0']
    bias_Y0 = self.params['bias_Y0']

    h = np.zeros((N, self._n_hidden))
    self.previous_h = [h]
    for t in range(sequence_length):
        X_t = X[:, t, :]
        # Candidate hidden state for this step.
        h0 = self._update_h(X_t, h, WX, Wh, bias_h)
        # Project the candidate onto every previously seen hidden state.
        projected_h = sum(
            batch_scalar_product(h_prev, h0) * h_prev
            for h_prev in self.previous_h)
        h = np.dot(X_t, WX) + np.dot(h, Wh) + projected_h
        h = self._nonlinear(h)
        self.previous_h.append(h)

    Y0 = layers.relu(layers.affine(h, WY0, bias_Y0))
    Y = layers.affine(Y0, WY, bias_Y)
    return Y
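
batch_scalar_product is not defined in the snippet above. A plausible minimal sketch, assuming it takes two (N, H) matrices and returns the row-wise dot product as an (N, 1) column so the result broadcasts against each stored hidden state:

import numpy as np

def batch_scalar_product(a, b):
    # Row-wise dot product of two (N, H) matrices, returned as an
    # (N, 1) column so it broadcasts over hidden units.
    return np.sum(a * b, axis=1, keepdims=True)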
Example No. 3
def forward(self, X, mode):
    out = self.conv(X=X, **self.params)
    out = layers.affine(out, self.params['w1'], self.params['b1'])
    out = layers.relu(out)
    out = layers.affine(out, self.params['w2'], self.params['b2'])
    # Run the convolution again on a dummy input; the result is
    # intentionally discarded. This verifies that symbols can be reused.
    trash = self.conv(X=np.zeros(X.shape), **self.params)
    return out
Example No. 4
def forward(self, X, mode):
    # Flatten the input data to a matrix. batch_size and
    # flattened_input_size are presumably module-level constants
    # in the original source.
    X = np.reshape(X, (batch_size, flattened_input_size))
    # First affine layer (fully-connected layer).
    y1 = layers.affine(X, self.params['w1'], self.params['b1'])
    # ReLU activation.
    y2 = layers.relu(y1)
    # Second affine layer.
    y3 = layers.affine(y2, self.params['w2'], self.params['b2'])
    return y3
Example No. 5
def forward(self, X, mode):
    # Flatten the input data to a matrix.
    X = np.reshape(X, (batch_size, 3 * 32 * 32))
    # First affine layer (fully-connected layer).
    y1 = layers.affine(X, self.params['w1'], self.params['b1'])
    # ReLU activation.
    y2 = layers.relu(y1)
    # Batch normalization; the updated running statistics are stored
    # back into the auxiliary parameters.
    y3, self.aux_params['running_mean'], self.aux_params['running_var'] = \
        layers.batchnorm(
            y2, self.params['gamma'], self.params['beta'],
            running_mean=self.aux_params['running_mean'],
            running_var=self.aux_params['running_var'])
    # Second affine layer.
    y4 = layers.affine(y3, self.params['w2'], self.params['b2'])
    # Dropout (rate 0.5), active only in training mode.
    y5 = layers.dropout(y4, 0.5, mode=mode)
    return y5
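
For reference, a minimal sketch of the train-mode batch-normalization computation assumed above, where gamma and beta are the learned scale and shift and the running statistics are tracked with an exponential moving average (the real layers.batchnorm may differ in signature and defaults):

import numpy as np

def batchnorm(X, gamma, beta, running_mean, running_var,
              momentum=0.9, eps=1e-5):
    # Normalize each feature over the batch, then scale and shift.
    mean = X.mean(axis=0)
    var = X.var(axis=0)
    X_hat = (X - mean) / np.sqrt(var + eps)
    out = gamma * X_hat + beta
    # Exponential moving average of batch statistics, used at test time.
    running_mean = momentum * running_mean + (1 - momentum) * mean
    running_var = momentum * running_var + (1 - momentum) * var
    return out, running_mean, running_var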
Example No. 6
def forward(self, X, mode):
    N, sequence_length, D = X.shape
    h = np.zeros((N, self._n_hidden))

    WX = self.params['WX']
    Wh = self.params['Wh']
    bias_h = self.params['bias_h']
    WY = self.params['WY']
    bias_Y = self.params['bias_Y']
    WY0 = self.params['WY0']
    bias_Y0 = self.params['bias_Y0']

    self.previous_h = [h]
    for t in range(sequence_length):
        X_t = X[:, t, :]
        # Propose a new hidden state, then refine it against the
        # stored history of hidden states.
        h = self._update_h(X_t, h, WX, Wh, bias_h)
        h = self._inner_loop(X_t, self.previous_h[-1], h, WX, Wh,
                             self.previous_h)
        self.previous_h.append(h)

    Y0 = layers.relu(layers.affine(h, WY0, bias_Y0))
    Y = layers.affine(Y0, WY, bias_Y)
    return Y
Example No. 7
def check_fn(x):
    # fake_y is captured from the enclosing test scope.
    return layers.l2_loss(layers.relu(x), fake_y)
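
Check functions like this one are typically handed to a numerical gradient checker. A minimal central-difference sketch, assuming check_fn returns a scalar loss; numeric_gradient is a hypothetical helper here, not part of layers:

import numpy as np

def numeric_gradient(f, x, eps=1e-6):
    # Central finite differences, one coordinate at a time.
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = x[idx]
        x[idx] = orig + eps
        f_plus = f(x)
        x[idx] = orig - eps
        f_minus = f(x)
        x[idx] = orig
        grad[idx] = (f_plus - f_minus) / (2 * eps)
        it.iternext()
    return grad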
Example No. 8
def forward(self, X):
    # Two-layer perceptron: affine -> ReLU -> affine.
    y1 = layers.affine(X, self.params['w1'], self.params['b1'])
    y2 = layers.relu(y1)
    y3 = layers.affine(y2, self.params['w2'], self.params['b2'])
    return y3
Example No. 9
def check_fn(x):
    # Same pattern as Example No. 7, with a softmax loss; fake_y is
    # again captured from the enclosing test scope.
    return layers.softmax_loss(layers.relu(x), fake_y)
Example No. 10
def forward(self, inputs, *args):
    # Pure rectifier network; extra arguments (such as mode) are ignored.
    return layers.relu(inputs)
Example No. 11
def forward(self, X, mode):
    # Convolutional front end followed by a two-layer perceptron.
    out = self.conv(X=X, **self.params)
    out = layers.affine(out, self.params['w1'], self.params['b1'])
    out = layers.relu(out)
    out = layers.affine(out, self.params['w2'], self.params['b2'])
    return out