Example #1
 def loss(self, predict, y):
     # Add L2 regularization for all the weights; weight_decay is a
     # scalar hyperparameter defined in the enclosing scope.
     reg_loss = 0.0
     for name, weight in self.params.items():
         reg_loss += np.sum(weight ** 2) * 0.5
     return layers.softmax_cross_entropy(predict, y) + weight_decay * reg_loss
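For reference, the quantity accumulated in the loop is the standard L2 penalty 0.5 * Σ‖W‖², scaled by weight_decay in the return statement. A minimal stand-alone sketch in plain NumPy; the params dict and weight_decay value below are illustrative stand-ins for the model's attributes:

 import numpy as np

 def l2_penalty(params, weight_decay):
     # weight_decay * 0.5 * (sum of squared entries over all weight arrays),
     # matching the loop in the loss method above.
     return weight_decay * sum(0.5 * np.sum(w ** 2) for w in params.values())

 # Two toy weight matrices: 0.5 * (12 + 6) = 9, scaled by 1e-4.
 params = {'fc1_weight': np.ones((4, 3)), 'fc2_weight': np.ones((3, 2))}
 print(l2_penalty(params, weight_decay=1e-4))  # ~0.0009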
Example #2
 def loss(self, predict, y):
     return layers.softmax_cross_entropy(predict, y)
Example #3
 def loss(self, predict, y):
     # Compute softmax loss between the output and the label.
     return layers.softmax_cross_entropy(predict, y)
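Several of these examples delegate to layers.softmax_cross_entropy. As a rough illustration of what such a function computes, here is a sketch in plain NumPy; it is not the library's actual implementation, and it assumes predict holds raw class scores, y holds integer labels, and the loss is averaged over the batch:

 import numpy as np

 def softmax_cross_entropy(predict, y):
     # Numerically stable log-softmax, then the mean negative
     # log-likelihood of the true class for each row.
     shifted = predict - predict.max(axis=1, keepdims=True)
     log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
     return -log_probs[np.arange(y.shape[0]), y].mean()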
Example #4
 def check_fn(weights):
     # Loss as a function of the fully connected weights only;
     # f, x, and fake_y are captured from the enclosing scope.
     return layers.softmax_cross_entropy(
         f(x=x, softmax_label=fake_y, fc_weight=weights), fake_y)
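A closure like check_fn is the usual input to a numerical gradient checker: it reduces the network to a scalar-valued function of a single weight array. A minimal central-difference sketch under that assumption (the step size eps is illustrative):

 import numpy as np

 def numeric_gradient(fn, weights, eps=1e-5):
     # Perturb one entry at a time and approximate d(loss)/d(weight)
     # with central differences; fn must return a scalar.
     grad = np.zeros_like(weights)
     it = np.nditer(weights, flags=['multi_index'])
     while not it.finished:
         idx = it.multi_index
         saved = weights[idx]
         weights[idx] = saved + eps
         loss_plus = fn(weights)
         weights[idx] = saved - eps
         loss_minus = fn(weights)
         weights[idx] = saved
         grad[idx] = (loss_plus - loss_minus) / (2 * eps)
         it.iternext()
     return grad

 # grad = numeric_gradient(check_fn, weights.copy())
 # then compare against the analytic gradient, e.g. with np.allclose.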
Example #5
 def loss(self, predict, y):
     # Add L2 regularization for all the weights.
     reg_loss = 0.0
     for name, weight in self.params.items():
         reg_loss += np.sum(weight ** 2)
     return layers.softmax_cross_entropy(predict, y) + 0.5 * weight_decay * reg_loss