Example #1
 def __call__(self, x):
     """ Forward data through the network.
     
     This allows us to conveniently initialize a model `m` and then send data through it
     to be classified by calling `m(x)`.
     
     Parameters
     ----------
     x : Union[numpy.ndarray, mygrad.Tensor], shape=(N, D, S)
         The data to forward through the network.
         
     Returns
     -------
     mygrad.Tensor, shape=(N, 1)
         The model outputs.
     
     Notes
     -----
     N = batch size
     D = embedding size
     S = sentence length
     """
     # (N, D, S) with D = 200 and S = 77
     x = self.conv1(x) # conv output shape (N, F, S') with F = 250 and S' = 75
     x = relu(x)
     x = max_pool(x, (x.shape[-1],), 1) # global pool output shape (N, F, S') with F = 250, S' = 1
     x = x.reshape(x.shape[0], -1)  # (N, F, 1) -> (N, F)
     x = self.dense1(x) # (N, F) @ (F, D1) = (N, D1)
     x = relu(x) 
     x = self.dense2(x) # (N, D1) @ (D1, 1) = (N, 1)
     x = sigmoid(x)
     return x # output shape (N, 1)
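A quick sanity check of the global max-pool step above: a pooling window that spans the entire sentence axis collapses (N, F, S') to (N, F, 1). A minimal sketch, using the shapes from the comments (N=2, F=250, S'=75):

    import numpy as np
    import mygrad as mg
    from mygrad.nnet.layers import max_pool

    x = mg.Tensor(np.random.rand(2, 250, 75))  # (N, F, S')
    pooled = max_pool(x, (x.shape[-1],), 1)    # pool window spans the whole last axis
    print(pooled.shape)                        # (2, 250, 1); the reshape then yields (2, 250)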
Example #2
 def policyForward(self, data):
     data = mg.Tensor(data)  # ensure the input is a mygrad Tensor
     x = self.conv1(data)
     x = self.conv2(x)  # two conv layers applied back-to-back
     x = relu(self.dense1(x.reshape(x.shape[0], -1)))  # flatten, then dense -> relu
     x = relu(self.dense2(x))
     return self.dense3(x)  # raw scores from the final dense layer
Example #3
 def __call__(self, x):
     ''' Forward data through the network.
     
     This allows us to conveniently initialize a model `m` and then send data through it
     to be classified by calling `m(x)`.
     
     Parameters
     ----------
     x : Union[numpy.ndarray, mygrad.Tensor], shape=(N, C, H, W)
         The data to forward through the network.
         
     Returns
     -------
     mygrad.Tensor, shape=(N, 1)
         The model outputs.
     '''
     # conv -> pool -> relu -> conv -> pool, then flatten -> relu -> dense
     temp1 = max_pool(
             self.conv2(relu(max_pool(self.conv1(x), pool=(2, 2), stride=1))),
             pool=(2, 2), stride=1)
     s = temp1.shape
     temp1 = temp1.reshape(s[0], s[1] * s[2] * s[3])  # flatten to (N, F*H*W)
     return self.dense3(relu(temp1))
Example #4
 def __call__(self, x):
     x = relu(self.conv1(x))
     x = max_pool(relu(self.conv2(x)), (2, 2), (2, 2))
     x = relu(self.conv3(x))
     x = max_pool(relu(self.conv4(x)), (2, 2), (2, 2))
     x = x.reshape(x.shape[0], 256)  # flatten; assumes each sample yields exactly 256 features
     x = relu(self.dense1(x))
     x = self.dense2(x)
     return x
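Note that the hard-coded 256 in the reshape above works only if each sample really flattens to 256 values; reshape(x.shape[0], -1), used in the other examples, infers that size automatically. A tiny check with hypothetical feature-map dimensions:

    import numpy as np

    x = np.random.rand(3, 16, 4, 4)  # hypothetical (N, F, H, W) with 16 * 4 * 4 = 256
    assert x.reshape(x.shape[0], 256).shape == x.reshape(x.shape[0], -1).shape == (3, 256)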
Example #5
    def __call__(self, x):
        step1 = max_pool(relu(self.conv1(x)), (2, 2), stride=2)
        step2 = max_pool(relu(self.conv2(step1)), (2, 2), stride=2)

        flatten = step2.reshape(len(x), -1)  # flatten each sample's feature maps
        dense_layers = self.dense2(relu(self.dense1(flatten)))

        return dense_layers
Example #6
 def __call__(self, x):
     x = self.conv1(x)                                 # first conv layer
     x = max_pool(x, (2, 2), 2)                        # 2x2 max-pool, stride 2
     x = self.conv2(x)                                 # second conv layer
     x = max_pool(x, (2, 2), 2)
     x = relu(self.dense1(x.reshape(x.shape[0], -1)))  # flatten -> dense -> relu
     return self.dense2(x)
Example #7
    def __call__(self, x):
        ''' Forward data through the network.

        This allows us to conveniently initialize a model `m` and then send data through it
        to be classified by calling `m(x)`.
        
        Parameters
        ----------
        x : Union[numpy.ndarray, mygrad.Tensor], shape=(N, D)
            The data to forward through the network.
            
        Returns
        -------
        mygrad.Tensor, shape=(N, 1)
            The model outputs.
        '''
        # if num_filters = 10; (N, C, 32, 32) --> (N, 10, 28, 28)
        x = self.conv1(x)
        # (N, 10, 28, 28) --> (N, 10, 14, 14)
        x = max_pool(x, (2,2), 2)
        # if num_filters = 20; (N, 10, 14, 14) --> (N, 20, 10, 10)
        x = self.conv2(x)
        # (N, 20, 10, 10) --> (N, 20, 5, 5)
        x = max_pool(x, (2,2), 2)
        # (N, 20, 5, 5) -reshape-> (N, 500) x (500, 20) -> (N, 20)
        x = relu(self.dense1(x.reshape(x.shape[0], -1)))
        # (N, 20) -> (N, 10)
        return self.dense2(x)
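The shape comments above follow the formula for a valid (unpadded) convolution, out = (in - kernel) // stride + 1; the 32 --> 28 transition implies 5x5 kernels. A small sketch reproducing the arithmetic:

    def conv_out(size, kernel, stride=1):
        """Spatial size after a valid (unpadded) convolution."""
        return (size - kernel) // stride + 1

    s = conv_out(32, 5)                # 28, matching (N, 10, 28, 28)
    s = conv_out(s // 2, 5)            # 2x2 pooling halves 28 -> 14, then conv -> 10
    print(s // 2, 20 * (s // 2) ** 2)  # 5, and 20 * 5 * 5 = 500 flattened features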
Example #8
    def __call__(self, x):
        ''' Defines a forward pass of the model.
        
        Parameters
        ----------
        x : numpy.ndarray, shape=(N, 1, 28, 28)
            The input data, where N is the number of images.
            
        Returns
        -------
        mygrad.Tensor, shape=(N, 10)
            The class scores for each of the N images.
        
        Pseudo-code
        -----------
        >>> create the dropout object (in __init__)
        >>> compute the first convolutional layer via self.conv1(x)
        >>> perform ReLU by using relu(x)
        >>> perform dropout by using self.dropout(x)
        >>> use max_pool(x, pool_size, stride) to perform the pooling layer
        >>> repeat once
        >>> perform two dense layers, with ReLU and dropout in between
        '''

        #first conv layer
        x = self.conv1(x)
        x = relu(x)
        x = self.dropout(x)
        x = max_pool(x, (2, 2), 2)

        #second conv layer
        x = self.conv2(x)
        x = relu(x)
        x = self.dropout(x)
        x = max_pool(x, (2, 2), 2)

        #performing the two dense layers
        x = x.reshape(x.shape[0], -1)

        x = self.dense1(x)
        x = relu(x)
        x = self.dropout(x)
        x = self.dense2(x)

        return x
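The self.dropout used above is assumed to be constructed in __init__ (the pseudo-code's "create dropout object"). For reference, a minimal numpy sketch of inverted dropout, the training-time variant that rescales surviving activations so no correction is needed at test time:

    import numpy as np

    def dropout(x, p=0.5, rng=np.random.default_rng(0)):
        # zero each activation with probability p, then scale
        # survivors by 1 / (1 - p) to preserve the expected value
        mask = rng.random(x.shape) >= p
        return x * mask / (1.0 - p)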
Example #9
 def __call__(self, x):
     """ Performs a "forward-pass" of data through the network.
     This allows us to conveniently initialize a model `m` and then send data through it
     to be classified by calling `m(x)`.
     Parameters
     ----------
     x : Union[numpy.ndarray, mygrad.Tensor], shape=(M, ?)
         A batch of data consisting of M pieces of data,
         each with a dimensionality of ? (the number of
         values among all the pixels in a given image).
     Returns
     -------
     mygrad.Tensor, shape=(M, num_class)
         The model's prediction for each of the M images in the batch.
     """
     x = relu(self.conv1(x))
     x = max_pool(x, (2, 2), 2)
     x = relu(self.conv2(x))
     x = max_pool(x, (2, 2), 2)
     x = relu(self.dense1(x.reshape(x.shape[0], -1)))
     return self.dense2(x)
Example #10
    def __call__(self, x):
        step1 = max_pool(self.conv1(x), (2, 2), stride=2)
        step2 = max_pool(self.conv2(step1), (2, 2), stride=2)
        flatten = step2.reshape(-1, 500)
        dense_layers = self.dense2(relu(self.dense1(flatten)))

        return dense_layers
Example #11
    def __call__(self, x):
        """ Performs the full forward pass for the RNN.

        Note that we only care about the last y: the final classification scores for the full sequence.

        Parameters
        ----------
        x: Union[numpy.ndarray, mygrad.Tensor], shape=(T, 50)
            The word embeddings for the sequence, one 50-dimensional row per time step

        Returns
        -------
        mygrad.Tensor, shape=(1, C)
            The final classification scores for the sequence, where C is the number of classes
        """
        h = np.zeros((1, self.fc_h2h.weight.shape[0]), dtype=np.float32)
        for x_t in x:
            h = relu(self.fc_x2h(x_t[np.newaxis]) + self.fc_h2h(h))

        return self.fc_h2y(h)
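The loop above implements the vanilla-RNN recurrence h_t = relu(x_t @ W_xh + h_{t-1} @ W_hh). A self-contained sketch with hypothetical dimensions (T time steps, 50-dim embeddings, hidden size 16), standing in for the fc_x2h and fc_h2h layers:

    import numpy as np
    import mygrad as mg
    from mygrad.nnet.activations import relu

    T, D, H = 5, 50, 16
    x = np.random.rand(T, D).astype(np.float32)  # one sequence of T embeddings
    W_xh = mg.Tensor(np.random.randn(D, H) * 0.1)
    W_hh = mg.Tensor(np.random.randn(H, H) * 0.1)

    h = np.zeros((1, H), dtype=np.float32)
    for x_t in x:  # h_t = relu(x_t @ W_xh + h_{t-1} @ W_hh)
        h = relu(mg.matmul(x_t[np.newaxis], W_xh) + mg.matmul(h, W_hh))
    print(h.shape)  # (1, 16): the final hidden state, which fc_h2y would map to scores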
Example #12
 def __call__(self, x):
     return self.dense3(relu(self.dense2(relu(self.dense1(x)))))  # dense -> relu -> dense -> relu -> dense
Example #13
 def __call__(self, X):
     X = relu(mg.matmul(X, self.w1, True) + self.b1)  # first hidden layer
     X = relu(mg.matmul(X, self.w2, True) + self.b2)  # second hidden layer
     X = mg.matmul(X, self.w3, True) + self.b3        # output scores
     return mg.nnet.activations.softmax(X, constant=True)  # class probabilities
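This last variant carries its weights explicitly rather than using layer objects. A minimal usage sketch with hypothetical shapes (batch of 4, 8 inputs, 16-unit hidden layers, 3 classes; the constant flags are omitted here), confirming that each softmax row sums to 1:

    import numpy as np
    import mygrad as mg
    from mygrad.nnet.activations import relu, softmax

    N, D, H, K = 4, 8, 16, 3
    X = np.random.rand(N, D)
    w1, b1 = mg.Tensor(np.random.randn(D, H) * 0.1), mg.Tensor(np.zeros(H))
    w2, b2 = mg.Tensor(np.random.randn(H, H) * 0.1), mg.Tensor(np.zeros(H))
    w3, b3 = mg.Tensor(np.random.randn(H, K) * 0.1), mg.Tensor(np.zeros(K))

    X1 = relu(mg.matmul(X, w1) + b1)
    X2 = relu(mg.matmul(X1, w2) + b2)
    probs = softmax(mg.matmul(X2, w3) + b3)
    print(probs.shape, probs.sum(axis=1))  # (4, 3); each row sums to 1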