def loss(self, predict, y):
    # Add L2 regularization for all the weights.
    # weight_decay is a hyperparameter assumed to be defined elsewhere.
    reg_loss = 0.0
    for name, weight in self.params.items():
        reg_loss += np.sum(weight ** 2) * 0.5
    return layers.softmax_cross_entropy(predict, y) + weight_decay * reg_loss
def loss(self, predict, y):
    return layers.softmax_cross_entropy(predict, y)
def loss(self, predict, y):
    # Compute softmax loss between the output and the label.
    return layers.softmax_cross_entropy(predict, y)
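For reference, a plain-NumPy sketch of what a standard softmax cross-entropy computes is below. The helper name `softmax_cross_entropy_ref` and the mean reduction over the batch are assumptions; the actual `layers.softmax_cross_entropy` may differ in signature or reduction.

import numpy as np

def softmax_cross_entropy_ref(logits, labels):
    # Hypothetical reference implementation, for intuition only.
    # Shift logits for numerical stability before exponentiating.
    shifted = logits - np.max(logits, axis=1, keepdims=True)
    log_probs = shifted - np.log(np.sum(np.exp(shifted), axis=1, keepdims=True))
    # Mean negative log-likelihood of the true labels.
    n = logits.shape[0]
    return -np.mean(log_probs[np.arange(n), labels])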
def check_fn(weights):
    return layers.softmax_cross_entropy(
        f(x=x, softmax_label=fake_y, fc_weight=weights), fake_y)
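Since `check_fn` maps a single weight tensor to a scalar loss, its analytic gradient can be verified against a finite-difference estimate. Below is a minimal sketch of such a check, assuming plain NumPy arrays; `numeric_gradient` is a hypothetical helper, not part of the library.

import numpy as np

def numeric_gradient(fn, weights, eps=1e-6):
    # Central-difference estimate of d fn(weights) / d weights,
    # perturbing one entry at a time.
    grad = np.zeros_like(weights)
    it = np.nditer(weights, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = weights[idx]
        weights[idx] = orig + eps
        loss_plus = fn(weights)
        weights[idx] = orig - eps
        loss_minus = fn(weights)
        weights[idx] = orig  # restore the original entry
        grad[idx] = (loss_plus - loss_minus) / (2 * eps)
        it.iternext()
    return grad

# Usage: compare against the analytic gradient, e.g.
# num_grad = numeric_gradient(check_fn, fc_weight.copy())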
def loss(self, predict, y):
    # Add L2 regularization for all the weights.
    reg_loss = 0.0
    for name, weight in self.params.items():
        reg_loss += np.sum(weight ** 2)
    return layers.softmax_cross_entropy(predict, y) + 0.5 * weight_decay * reg_loss
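Note that this version and the earlier regularized `loss` are numerically identical: folding the 1/2 into each per-weight term or applying it once to the summed penalty gives the same result. A quick sanity check, with a toy `params` dict and an assumed `weight_decay` value:

import numpy as np

params = {'fc_weight': np.array([[1.0, -2.0], [0.5, 3.0]])}  # toy example
weight_decay = 1e-3  # assumed value for illustration

reg_a = weight_decay * sum(np.sum(w ** 2) * 0.5 for w in params.values())
reg_b = 0.5 * weight_decay * sum(np.sum(w ** 2) for w in params.values())
assert np.isclose(reg_a, reg_b)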