Example #1
0
    def forward_propagation(self, x):
        """
        Run a forward pass of the model over a batch of examples, applying
        the activation of every layer except the top one.

        Returns the linear input to the top layer, i.e. non-normalized
        scores (logits); a higher score corresponds to a greater probability
        that the image belongs to that class.

        Inputs:
            x: batch with shape [batch_size, 1, 28, 28]
        Output:
            A Tensor with shape [batch_size, 10] containing predicted scores (logits)
        """

        # Rescale raw pixel values from [0, 255] into [-1, 1] to help training.
        x = x / 127.5 - 1.0
        # Collapse each image into a flat feature vector: shape [N, d].
        x = nn.reshape(x, [x.shape[0], -1], inplace=True)

        # define the structure of the network to get the logits result from x
        #######################################################################
        "*** YOUR CODE HERE ***"
        # Hidden layer 1: affine transform followed by ReLU.
        hidden1 = nn.relu(nn.matmul(x, self.param["w"]) + self.param["b"])
        # Hidden layer 2: affine transform followed by ReLU.
        hidden2 = nn.relu(nn.matmul(hidden1, self.param["w1"]) + self.param["b1"])
        # Top layer: affine transform only — raw, non-normalized scores.
        logits = nn.matmul(hidden2, self.param["w2"]) + self.param["b2"]
        #######################################################################

        return logits
Example #2
0
    def forward_propagation(self, x):
        """
        Runs a forward propagation of the model for a batch of examples, activating
        all layers besides the top one.

        The function should return the linear input to the top layer, i.e.,
        non-normalized scores, with higher scores corresponding to greater
        probability of an image belonging a particular class.

        Inputs:
            x: batch with shape [batch_size, 1, 28, 28]
        Output:
            A Tensor with shape [batch_size, 10] containing predicted scores (logits)
        """

        # normalize data to improve performance
        x = x / 127.5 - 1.0
        # flatten the input image data to a 2d matrix in the shape [N, d]
        x = nn.reshape(x, [x.shape[0], -1], inplace=True)

        # define the structure of the network to get the logits result from x
        #######################################################################
        "*** YOUR CODE HERE ***"
        # Hidden layer 1: affine transform followed by ReLU.
        z1 = nn.relu(nn.matmul(x, self.param['w1']) + self.param['b1'])
        # Hidden layer 2: affine transform followed by sigmoid.
        z2 = nn.sigmoid(nn.matmul(z1, self.param['w2']) + self.param['b2'])
        # Hidden layer 3: affine transform followed by PReLU with a small
        # fixed negative slope (0.0001).
        z3 = nn.prelu(nn.matmul(z2, self.param['w3']) + self.param['b3'], 0.0001)
        # Top layer: affine transform only — raw logits, no activation.
        logits = nn.matmul(z3, self.param['w4']) + self.param['b4']
        #######################################################################

        return logits