Example #1
    def forward(self, input, hidden=None):
        '''
        Args:
            input (Tensor): (effective_batch_size, input_size)
            hidden (Tensor, None): (effective_batch_size, hidden_size)
        Returns:
            Tensor: (effective_batch_size, hidden_size)
        '''

        # Perform matrix operations to construct the intermediary values from the input and hidden tensors.
        # Remember to handle the case when hidden is None: construct a zero-filled tensor of the
        # appropriate size to use as the hidden state.
        effective_batch_size = input.shape[0]
        # If no hidden state is provided, initialize it with zeros.
        if hidden is None:
            requires_grad = True
            hidden = Tensor(np.zeros((effective_batch_size, self.hidden_size)),
                            requires_grad=requires_grad,
                            is_leaf=not requires_grad)

        sigmoid_ = Sigmoid()
        tanh_ = Tanh()
        # Reset gate: controls how much of the previous hidden state feeds the candidate.
        r_t = sigmoid_(
            input.matmul(self.weight_ir) + self.bias_ir +
            hidden.matmul(self.weight_hr) + self.bias_hr)
        # Update gate: controls how much of the previous hidden state is kept.
        z_t = sigmoid_(
            input.matmul(self.weight_iz) + self.bias_iz +
            hidden.matmul(self.weight_hz) + self.bias_hz)
        # Candidate hidden state, with the reset gate applied to the recurrent term.
        n_t = tanh_(
            input.matmul(self.weight_in) + self.bias_in + r_t *
            (hidden.matmul(self.weight_hn) + self.bias_hn))
        # Interpolate between the candidate and the previous hidden state.
        h_t = (Tensor(1) - z_t) * n_t + z_t * hidden
        return h_t
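
For reference, the block above implements the standard GRU cell equations: r_t = sigmoid(x W_ir + b_ir + h W_hr + b_hr), z_t = sigmoid(x W_iz + b_iz + h W_hz + b_hz), n_t = tanh(x W_in + b_in + r_t * (h W_hn + b_hn)), and h_t = (1 - z_t) * n_t + z_t * h, where * is elementwise. Below is a minimal plain-NumPy sketch of the same update, handy for checking shapes and values outside the autograd framework; the dict keys simply mirror the attribute names used above and are illustrative, not part of the framework's API.

    import numpy as np

    def gru_cell_reference(x, h, p):
        # x: (batch, input_size), h: (batch, hidden_size)
        # p: dict of NumPy arrays keyed by the attribute names used above
        # (weight_ir, bias_ir, ...); input-side weights are (input_size, hidden_size)
        # and hidden-side weights are (hidden_size, hidden_size) so x @ W and h @ W line up.
        sigmoid = lambda a: 1.0 / (1.0 + np.exp(-a))
        r = sigmoid(x @ p["weight_ir"] + p["bias_ir"] + h @ p["weight_hr"] + p["bias_hr"])
        z = sigmoid(x @ p["weight_iz"] + p["bias_iz"] + h @ p["weight_hz"] + p["bias_hz"])
        n = np.tanh(x @ p["weight_in"] + p["bias_in"] + r * (h @ p["weight_hn"] + p["bias_hn"]))
        return (1 - z) * n + z * h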
Example #2
    def forward(self, x):
        """
        Args:
            x (Tensor): (batch_size, in_features)
        Returns:
            Tensor: (batch_size, out_features)
        """

        # Affine transform: y = x @ W^T + b, with the weight stored as (out_features, in_features).
        return x.matmul(self.weight.T()) + self.bias
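
The layer above is a plain affine transform, y = x W^T + b; the weight is stored as (out_features, in_features), which is why it is transposed before the matmul. A quick NumPy shape check, with dimensions chosen arbitrarily for illustration:

    import numpy as np

    batch_size, in_features, out_features = 4, 3, 5
    x = np.random.randn(batch_size, in_features)
    W = np.random.randn(out_features, in_features)  # stored as (out_features, in_features)
    b = np.random.randn(out_features)
    y = x @ W.T + b                                  # same computation as forward() above
    assert y.shape == (batch_size, out_features)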
Example #3
    def forward(self, input, hidden=None):
        '''
        Args:
            input (Tensor): (effective_batch_size, input_size)
            hidden (Tensor, None): (effective_batch_size, hidden_size)
        Returns:
            Tensor: (effective_batch_size, hidden_size)
        '''

        # Perform matrix operations to construct the intermediary value from the input and hidden tensors.
        # Apply the activation to the result.
        # Remember to handle the case when hidden is None: construct a zero-filled tensor of the
        # appropriate size to use as the hidden state.
        effective_batch_size = input.shape[0]
        # If no hidden state is provided, initialize it with zeros.
        if hidden is None:
            requires_grad = True
            hidden = Tensor(np.zeros((effective_batch_size, self.hidden_size)),
                            requires_grad=requires_grad,
                            is_leaf=not requires_grad)

        # Affine combination of the input and the previous hidden state, followed by the activation.
        res = (input.matmul(self.weight_ih) + self.bias_ih +
               hidden.matmul(self.weight_hh) + self.bias_hh)
        return self.act(res)
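
This is the Elman RNN cell update, h_t = act(x W_ih + b_ih + h_{t-1} W_hh + b_hh). Below is a minimal NumPy sketch of the same step, assuming tanh as the activation and the weight shapes implied above; the function and parameter names are illustrative only, not the framework's API.

    import numpy as np

    def rnn_cell_reference(x, h, weight_ih, bias_ih, weight_hh, bias_hh):
        # x: (batch, input_size), h: (batch, hidden_size) or None
        # weight_ih: (input_size, hidden_size), weight_hh: (hidden_size, hidden_size)
        if h is None:
            h = np.zeros((x.shape[0], weight_hh.shape[0]))
        return np.tanh(x @ weight_ih + bias_ih + h @ weight_hh + bias_hh)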