def forward_propagation(self, x):
    """
    Runs a forward propagation of the model for a batch of examples,
    activating all layers besides the top one. The function should return
    the linear input to the top layer, i.e., non-normalized scores, with
    higher scores corresponding to a greater probability of an image
    belonging to a particular class.

    Inputs:
        x: batch with shape [batch_size, 1, 28, 28]
    Output:
        A Tensor with shape [batch_size, 10] containing predicted scores (logits)
    """
    # normalize data to improve performance
    x = x / 127.5 - 1.0
    # flatten the input image data to a 2d matrix in the shape [N, d]
    x = nn.reshape(x, [x.shape[0], -1], inplace=True)
    # define the structure of the network to get the logits result from x
    #######################################################################
    "*** YOUR CODE HERE ***"
    # hidden layer: affine transform followed by a ReLU non-linearity
    hidden1 = nn.matmul(x, self.param['w1']) + self.param['b1']
    z1 = nn.relu(hidden1)
    # output layer: affine transform only; the scores stay unnormalized
    logits = nn.matmul(z1, self.param['w2']) + self.param['b2']
    #######################################################################
    return logits
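# For reference, a minimal NumPy sketch of the same two-layer computation:
# an affine transform + ReLU hidden layer followed by a final affine layer
# producing unnormalized class scores. The nn module and self.param layout
# above belong to the assignment framework; the hidden width of 256 and the
# forward_two_layer helper below are assumptions made purely for illustration.
import numpy as np

def forward_two_layer(x, w1, b1, w2, b2):
    """x: [N, 784], w1: [784, 256], b1: [256], w2: [256, 10], b2: [10]."""
    x = x / 127.5 - 1.0                     # same normalization as above
    hidden = np.maximum(x @ w1 + b1, 0.0)   # affine transform + ReLU
    return hidden @ w2 + b2                 # unnormalized class scores (logits)

# Example usage with randomly initialized parameters (illustration only).
rng = np.random.default_rng(0)
x = rng.integers(0, 256, size=(4, 784)).astype(np.float32)
w1 = rng.normal(0.0, 0.01, size=(784, 256)); b1 = np.zeros(256)
w2 = rng.normal(0.0, 0.01, size=(256, 10));  b2 = np.zeros(10)
print(forward_two_layer(x, w1, b1, w2, b2).shape)   # (4, 10)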
def forward_propagation(self, x):
    """
    Runs a forward propagation of the model for a batch of examples,
    activating all layers besides the top one. The function should return
    the linear input to the top layer, i.e., non-normalized scores, with
    higher scores corresponding to a greater probability of an image
    belonging to a particular class.

    Inputs:
        x: batch with shape [batch_size, 1, 28, 28]
    Output:
        A Tensor with shape [batch_size, 10] containing predicted scores (logits)
    """
    # normalize data to improve performance
    x = x / 127.5 - 1.0
    # flatten the input image data to a 2d matrix in the shape [N, d]
    x = nn.reshape(x, [x.shape[0], -1], inplace=True)
    # define the structure of the network to get the logits result from x
    #######################################################################
    "*** YOUR CODE HERE ***"
    # first hidden layer: affine transform followed by a ReLU non-linearity
    y1 = nn.matmul(x, self.param["w"]) + self.param["b"]
    y1 = nn.relu(y1)
    # second hidden layer: affine transform followed by a ReLU non-linearity
    y2 = nn.matmul(y1, self.param["w1"]) + self.param["b1"]
    y2 = nn.relu(y2)
    # output layer: affine transform only; no sigmoid/softmax is applied,
    # since the top layer expects non-normalized scores
    logits = nn.matmul(y2, self.param["w2"]) + self.param["b2"]
    #######################################################################
    return logits
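# The logits returned above are deliberately left unnormalized; a hypothetical
# caller could convert them into class probabilities with a numerically stable
# softmax and into predicted labels with an argmax, as sketched below
# (softmax_probs is an illustrative helper, not part of the framework).
import numpy as np

def softmax_probs(logits):
    """logits: [N, C] -> class probabilities of the same shape."""
    shifted = logits - logits.max(axis=1, keepdims=True)  # subtract row max for stability
    exps = np.exp(shifted)
    return exps / exps.sum(axis=1, keepdims=True)

logits = np.array([[2.0, 0.5, -1.0], [0.1, 0.2, 0.3]])
probs = softmax_probs(logits)        # each row sums to 1
preds = probs.argmax(axis=1)         # same indices as logits.argmax(axis=1)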