Example No. 1
    def forward(self, X, mode):
        N, sequence_length, D = X.shape
        WX = self.params['WX']
        Wh = self.params['Wh']
        bias_h = self.params['bias_h']
        WY = self.params['WY']
        bias_Y = self.params['bias_Y']
        WY0 = self.params['WY0']
        bias_Y0 = self.params['bias_Y0']

        h = np.zeros((N, self._n_hidden))
        self.previous_h = [h]
        for t in range(sequence_length):
            X_t = X[:, t, :]
            h0 = self._update_h(X_t, h, WX, Wh, bias_h)
            projected_h = sum(
                batch_scalar_product(h_prev, h0) * h_prev
                for h_prev in self.previous_h)
            h = np.dot(X_t, WX) + np.dot(h, Wh) + projected_h
            h = self._nonlinear(h)
            self.previous_h.append(h)

        Y0 = layers.relu(layers.affine(h, WY0, bias_Y0))
        Y = layers.affine(Y0, WY, bias_Y)
        return Y
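
Note on Example No. 1: the inner sum re-weights every previously stored hidden state by its similarity to the freshly computed candidate h0. Below is a minimal plain-numpy sketch of that projection step; it assumes batch_scalar_product returns one scalar per example with a trailing singleton axis so the product broadcasts (the helper's real definition is not shown in the snippet).

import numpy as np

# Minimal sketch of the projection step, assuming batch_scalar_product
# returns one scalar per example, shaped (N, 1) so it broadcasts over h_prev.
def batch_scalar_product(a, b):
    return np.sum(a * b, axis=1, keepdims=True)

N, n_hidden = 4, 8
h0 = np.random.randn(N, n_hidden)                        # candidate state
previous_h = [np.random.randn(N, n_hidden) for _ in range(3)]

# Weight each stored state by its similarity to h0 and sum the results.
projected_h = sum(batch_scalar_product(h_prev, h0) * h_prev
                  for h_prev in previous_h)
print(projected_h.shape)  # (4, 8)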
Example No. 2
 def forward(self, X, mode):
     out = self.conv(X=X, **self.params)
     out = layers.affine(out, self.params['w1'], self.params['b1'])
     out = layers.relu(out)
     out = layers.affine(out, self.params['w2'], self.params['b2'])
     # This verifies whether symbols can be reused.
     trash = self.conv(X=np.zeros(X.shape), **self.params)
     return out
Example No. 3
 def forward(self, X, mode):
     out = self.conv(X=X, **self.params)
     out = layers.affine(out, self.params['w1'], self.params['b1'])
     out = layers.relu(out)
     out = layers.affine(out, self.params['w2'], self.params['b2'])
     # This verifies whether symbols can be reused.
     trash = self.conv(X=np.zeros(X.shape), **self.params)
     return out
Example No. 4
 def forward(self, X, mode):
     # Flatten the input data to a matrix.
     X = np.reshape(X, (X.shape[0], -1))
     # First affine layer (fully-connected layer).
     y1 = layers.affine(X, self.params['w1'], self.params['b1'])
     # ReLU activation.
     y2 = layers.relu(y1)
     # Second affine layer.
     y3 = layers.affine(y2, self.params['w2'], self.params['b2'])
     return y3
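
The two-layer network above only needs affine and ReLU primitives. A self-contained plain-numpy sketch of the same forward pass follows, assuming layers.affine computes X.dot(W) + b and layers.relu is an elementwise maximum with zero; the dimensions are illustrative.

import numpy as np

# Self-contained sketch of the two-layer forward pass, assuming
# layers.affine(X, W, b) == X.dot(W) + b and layers.relu(x) == max(x, 0).
def affine(X, W, b):
    return X.dot(W) + b

def relu(x):
    return np.maximum(x, 0)

batch_size, in_dim, hid_dim, out_dim = 32, 784, 256, 10  # illustrative sizes
X = np.random.randn(batch_size, in_dim)
params = {
    'w1': np.random.randn(in_dim, hid_dim) * 0.01, 'b1': np.zeros(hid_dim),
    'w2': np.random.randn(hid_dim, out_dim) * 0.01, 'b2': np.zeros(out_dim),
}
y1 = affine(X, params['w1'], params['b1'])
y2 = relu(y1)
y3 = affine(y2, params['w2'], params['b2'])
print(y3.shape)  # (32, 10)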
Example No. 5
 def forward(self, X, mode):
     # Flatten the input data to a matrix.
     X = np.reshape(X, (self.batch_size, 784))
     # First affine layer (fully-connected layer).
     y1 = layers.affine(X, self.params['wi'], self.params['bi'])
     # ReLU activation.
     y2 = layers.relu(y1)
     # Hidden layers.
     for i in range(self.num_hidden - 1):
         y2 = layers.affine(y2, self.params['w%d' % i], self.params['b%d' % i])
         y2 = layers.relu(y2)
     # Output affine layer.
     y3 = layers.affine(y2, self.params['wo'], self.params['bo'])
     return y3
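
Example No. 5 expects a specific parameter-naming scheme: 'wi'/'bi' for the input layer, 'w0', 'b0', ... for the hidden layers, and 'wo'/'bo' for the output layer. A small sketch of building such a dictionary with hypothetical dimensions:

import numpy as np

# Sketch of a parameter dictionary matching the naming scheme above
# ('wi'/'bi' for input, 'w0', 'b0', ... for hidden, 'wo'/'bo' for output).
# The dimensions are illustrative only.
def make_params(in_dim=784, hid_dim=128, out_dim=10, num_hidden=3, scale=0.01):
    params = {'wi': np.random.randn(in_dim, hid_dim) * scale,
              'bi': np.zeros(hid_dim),
              'wo': np.random.randn(hid_dim, out_dim) * scale,
              'bo': np.zeros(out_dim)}
    for i in range(num_hidden - 1):
        params['w%d' % i] = np.random.randn(hid_dim, hid_dim) * scale
        params['b%d' % i] = np.zeros(hid_dim)
    return params

params = make_params()
print(sorted(params))  # ['b0', 'b1', 'bi', 'bo', 'w0', 'w1', 'wi', 'wo']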
Example No. 6
 def forward(self, X, mode):
     h = np.zeros(self.hshape)  # init hidden state
     for t in range(self.num_unroll_steps):
         h = layers.rnn_step(X, h, self.params['Wx'],
                             self.params['Wh'], self.params['b'])
     y = layers.affine(h, self.params['Wa'], self.params['ba'])
     return y
Example No. 7
 def forward(self, X, mode):
     # Flatten the input data to a matrix.
     X = np.reshape(X, (X.shape[0], 3 * 32 * 32))
     # First affine layer (fully-connected layer).
     y1 = layers.affine(X, self.params['w1'], self.params['b1'])
     # ReLU activation.
     y2 = layers.relu(y1)
     # Batch normalization (also updates the running statistics in aux_params).
     y3, self.aux_params['running_mean'], self.aux_params['running_var'] = layers.batchnorm(
         y2, self.params['gamma'], self.params['beta'],
         running_mean=self.aux_params['running_mean'],
         running_var=self.aux_params['running_var'])
     # Second affine layer.
     y4 = layers.affine(y3, self.params['w2'], self.params['b2'])
     # Dropout
     y5 = layers.dropout(y4, 0.5, mode=mode)
     return y5
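
The batch-normalization call above threads the running mean and variance through aux_params so they can be reused at test time. A generic, self-contained sketch of that mechanism (not necessarily the exact signature of layers.batchnorm):

import numpy as np

# Generic batch-normalization sketch: normalize with batch statistics during
# training, with the accumulated running averages at test time.
def batchnorm(x, gamma, beta, running_mean, running_var,
              mode='train', eps=1e-5, momentum=0.9):
    if mode == 'train':
        mean, var = x.mean(axis=0), x.var(axis=0)
        running_mean = momentum * running_mean + (1 - momentum) * mean
        running_var = momentum * running_var + (1 - momentum) * var
    else:
        mean, var = running_mean, running_var
    out = gamma * (x - mean) / np.sqrt(var + eps) + beta
    return out, running_mean, running_var

x = np.random.randn(16, 8)
gamma, beta = np.ones(8), np.zeros(8)
out, rm, rv = batchnorm(x, gamma, beta, np.zeros(8), np.ones(8), mode='train')
print(out.mean(axis=0).round(3))  # approximately zero per feature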
Example No. 8
 def forward(self, X, mode):
     h = np.zeros(self.hshape)  # init hidden state
     for t in range(self.num_unroll_steps):
         h = layers.rnn_step(X, h, self.params['Wx'], self.params['Wh'],
                             self.params['b'])
     y = layers.affine(h, self.params['Wa'], self.params['ba'])
     return y
Example No. 9
 def forward(self, X, mode):
     # Flatten the input data to a matrix.
     X = np.reshape(X, (X.shape[0], 3 * 32 * 32))
     # First affine layer (fully-connected layer).
     y1 = layers.affine(X, self.params['w1'], self.params['b1'])
     # ReLU activation.
     y2 = layers.relu(y1)
     # Batch normalization (also updates the running statistics in aux_params).
     y3, self.aux_params['running_mean'], self.aux_params['running_var'] = layers.batchnorm(
         y2, self.params['gamma'], self.params['beta'],
         running_mean=self.aux_params['running_mean'],
         running_var=self.aux_params['running_var'])
     # Second affine layer.
     y4 = layers.affine(y3, self.params['w2'], self.params['b2'])
     # Dropout
     y5 = layers.dropout(y4, 0.5, mode=mode)
     return y5
Example No. 10
 def forward(self, X, mode):
     seq_len = X.shape[1]
     h = self.params['h0']
     for t in range(seq_len):
         h = layers.rnn_step(X[:, t, :], h, self.params['Wx'],
                             self.params['Wh'], self.params['b'])
     y = layers.affine(h, self.params['Wa'], self.params['ba'])
     return y
Example No. 11
 def forward(self, X, mode):
     seq_len = X.shape[1]
     h = self.params['h0']
     for t in range(seq_len):
         h = layers.rnn_step(X[:, t, :], h, self.params['Wx'],
                             self.params['Wh'], self.params['b'])
     y = layers.affine(h, self.params['Wa'], self.params['ba'])
     return y
Example No. 12
 def forward(self, X, mode):
     network = X
     for index in range(len(self._dimensions) - 2):
         W, b = self.params['W%d' % index], self.params['b%d' % index]
         network = affine(network, W, b)
         network = self._nonlinear(network)
         shared_W = self.params['shared_W%d' % index]
         shared_b = self.params['shared_b%d' % index]
         # Refine the activations several times with a shared (tied) affine layer.
         for t in range(self._refining_times):
             residual = affine(network, shared_W, shared_b)
             residual = self._nonlinear(residual)
             # Blend the refined activations back in with a decay rate.
             network = self._decay_rate * network + (
                 1 - self._decay_rate) * residual
     # Final affine layer producing the output (no nonlinearity).
     W = self.params['W%d' % (len(self._dimensions) - 1)]
     b = self.params['b%d' % (len(self._dimensions) - 1)]
     network = affine(network, W, b)
     return network
Example No. 13
 def forward(self, X, mode):
     seq_len = X.shape[1]
     batch_size = X.shape[0]
     hidden_size = self.params['Wh'].shape[0]
     h = np.zeros((batch_size, hidden_size))
     for t in range(seq_len):
         h = layers.rnn_step(X[:, t, :], h, self.params['Wx'],
                             self.params['Wh'], self.params['b'])
     y = layers.affine(h, self.params['Wa'], self.params['ba'])
     return y
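
Examples No. 6, 8, 10, 11 and 13 all unroll layers.rnn_step over time. A plain-numpy sketch of one plausible step function is given below, assuming the vanilla update h_next = tanh(x.dot(Wx) + h.dot(Wh) + b); the shapes are illustrative.

import numpy as np

# Sketch of a vanilla RNN step under the assumption
# h_next = tanh(x.dot(Wx) + h.dot(Wh) + b).
def rnn_step(x, h, Wx, Wh, b):
    return np.tanh(x.dot(Wx) + h.dot(Wh) + b)

batch_size, seq_len, in_dim, hid_dim = 4, 5, 3, 6
X = np.random.randn(batch_size, seq_len, in_dim)
Wx = np.random.randn(in_dim, hid_dim) * 0.01
Wh = np.random.randn(hid_dim, hid_dim) * 0.01
b = np.zeros(hid_dim)

h = np.zeros((batch_size, hid_dim))
for t in range(seq_len):
    h = rnn_step(X[:, t, :], h, Wx, Wh, b)
print(h.shape)  # (4, 6)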
Example No. 14
 def forward(self, X, mode):
     batch_size = X.shape[0]
     seq_len = X.shape[1]
     # Embedding lookup: map word indices to their embedding vectors.
     X_emb = self.params['W_Emb'][X]
     hm1 = np.zeros((batch_size, self.HID_DIM))
     hs = []
     for t in range(seq_len):
         hm1 = self.one_step(X_emb[:, t, :], hm1)
         hs.append(hm1)
     # Stack the per-step hidden states and flatten to (batch_size * seq_len, HID_DIM).
     hs = np.stack(hs, axis=1).reshape((batch_size * seq_len, self.HID_DIM))
     pred_out = layers.affine(hs, self.params['W_Softmax'], self.params['b_Softmax'])
     return pred_out.reshape((batch_size, seq_len, self.WORD_DIM))
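
The reshape in Example No. 14 lets a single affine layer project every time step at once. A small plain-numpy sketch of that round trip with illustrative sizes:

import numpy as np

# Flatten (batch, time) so one matrix multiply covers all time steps,
# then restore the sequence layout afterwards.
batch_size, seq_len, hid_dim, vocab = 2, 3, 4, 5   # illustrative sizes
hs = [np.random.randn(batch_size, hid_dim) for _ in range(seq_len)]

flat = np.stack(hs, axis=1).reshape((batch_size * seq_len, hid_dim))
W, b = np.random.randn(hid_dim, vocab) * 0.01, np.zeros(vocab)
pred = (flat.dot(W) + b).reshape((batch_size, seq_len, vocab))
print(pred.shape)  # (2, 3, 5)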
Example No. 15
    def forward(self, X, mode):
        N, sequence_length, D = X.shape
        h = np.zeros((N, self._n_hidden))

        WX = self.params['WX']
        Wh = self.params['Wh']
        bias_h = self.params['bias_h']
        WY = self.params['WY']
        bias_Y = self.params['bias_Y']
        WY0 = self.params['WY0']
        bias_Y0 = self.params['bias_Y0']

        self.previous_h = [h]
        for t in range(sequence_length):
            X_t = X[:, t, :]
            h = self._update_h(X_t, h, WX, Wh, bias_h)
            h = self._inner_loop(X_t, self.previous_h[-1], h, WX, Wh,
                                 self.previous_h)
            self.previous_h.append(h)

        Y0 = layers.relu(layers.affine(h, WY0, bias_Y0))
        Y = layers.affine(Y0, WY, bias_Y)
        return Y
Example No. 16
    def forward(self, X, mode):
        N, sequence_length, D = X.shape
        h = np.zeros((N, self._n_hidden))
        c = np.zeros((N, self._n_hidden))

        WX = self.params['WX']
        Wh = self.params['Wh']
        bias = self.params['bias']
        WY = self.params['WY']
        bias_Y = self.params['bias_Y']

        for t in range(sequence_length):
            X_t = X[:, t, :]
            h, c = layers.lstm_step(X_t, h, c, WX, Wh, bias)

        Y = layers.affine(h, WY, bias_Y)
        return Y
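
For the LSTM variant in Example No. 16, a common layout packs the input, forget, output and candidate gates into weight matrices of width 4 * n_hidden. A self-contained sketch under that assumption (layers.lstm_step may differ in detail):

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

# Sketch of one LSTM step, assuming the gate pre-activations are packed
# side by side in weight matrices of width 4 * n_hidden.
def lstm_step(x, h, c, WX, Wh, bias):
    n_hidden = h.shape[1]
    a = x.dot(WX) + h.dot(Wh) + bias              # (N, 4 * n_hidden)
    i = sigmoid(a[:, 0 * n_hidden:1 * n_hidden])  # input gate
    f = sigmoid(a[:, 1 * n_hidden:2 * n_hidden])  # forget gate
    o = sigmoid(a[:, 2 * n_hidden:3 * n_hidden])  # output gate
    g = np.tanh(a[:, 3 * n_hidden:4 * n_hidden])  # candidate cell state
    c_next = f * c + i * g
    h_next = o * np.tanh(c_next)
    return h_next, c_next

N, D, H = 4, 3, 5
x = np.random.randn(N, D)
h, c = np.zeros((N, H)), np.zeros((N, H))
WX = np.random.randn(D, 4 * H) * 0.01
Wh = np.random.randn(H, 4 * H) * 0.01
bias = np.zeros(4 * H)
h, c = lstm_step(x, h, c, WX, Wh, bias)
print(h.shape, c.shape)  # (4, 5) (4, 5)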
Example No. 17
 def check_fn(w):
     return layers.l2_loss(layers.affine(x, w, b), fake_y)
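
check_fn helpers like the one above are typically handed to a numerical gradient checker. Below is a self-contained sketch of such a check using central differences; every name here (x, b, fake_y, l2_loss) is defined locally for illustration rather than taken from the original test.

import numpy as np

# Self-contained numerical gradient check for an affine layer with an L2 loss.
def l2_loss(pred, target):
    return 0.5 * np.sum((pred - target) ** 2)

x = np.random.randn(4, 3)
b = np.zeros(2)
fake_y = np.random.randn(4, 2)
w = np.random.randn(3, 2)

def check_fn(w):
    return l2_loss(x.dot(w) + b, fake_y)

# Analytic gradient of the loss w.r.t. w for this simple case.
analytic = x.T.dot(x.dot(w) + b - fake_y)

# Central-difference numerical gradient.
eps = 1e-6
numeric = np.zeros_like(w)
for idx in np.ndindex(w.shape):
    w_plus, w_minus = w.copy(), w.copy()
    w_plus[idx] += eps
    w_minus[idx] -= eps
    numeric[idx] = (check_fn(w_plus) - check_fn(w_minus)) / (2 * eps)

print(np.max(np.abs(numeric - analytic)))  # should be tiny (around 1e-7 or smaller)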
Example No. 18
 def forward(self, X, mode):
     out = self.conv(X=X, **self.params)
     out = layers.affine(out, self.params['w1'], self.params['b1'])
     out = layers.relu(out)
     out = layers.affine(out, self.params['w2'], self.params['b2'])
     return out
Example No. 19
 def forward(self, X):
     y1 = layers.affine(X, self.params['w1'], self.params['b1'])
     y2 = layers.relu(y1)
     y3 = layers.affine(y2, self.params['w2'], self.params['b2'])
     return y3
Example No. 20
 def forward(self, inputs, params):
     return layers.affine(inputs, params[self.weight], params[self.bias])
Example No. 21
 def forward(self, inputs, params):
     return layers.affine(inputs, params[self.weight], params[self.bias])
Example No. 22
 def check_fn(w):
     return layers.softmax_loss(layers.affine(x, w, b), fake_y)
Example No. 23
 def forward(self, X, mode):
     out = self.conv(X=X, **self.params)
     out = layers.affine(out, self.params["w1"], self.params["b1"])
     out = layers.relu(out)
     out = layers.affine(out, self.params["w2"], self.params["b2"])
     return out
Example No. 24
 def forward(self, X, mode):
     out = self.conv(X=X, **self.params)
     out = layers.affine(out, self.params['w1'], self.params['b1'])
     out = layers.relu(out)
     out = layers.affine(out, self.params['w2'], self.params['b2'])
     return out