Example #1
 def check_beta(b):
     # Loss as a function of the batchnorm shift parameter only;
     # x, gamma and fake_y are captured from the enclosing test scope.
     y, _, _ = layers.batchnorm(x, gamma, b)
     return layers.l2_loss(y, fake_y)
Example #2
 def check_fn(x):
     # Loss as a function of the input x; f, weights and fake_y come from the enclosing test scope.
     return layers.l2_loss(f(x=x, fc_weight=weights), fake_y)
Example #3
 def check_fn(x):
     # Loss of a ReLU applied to x; fake_y comes from the enclosing test scope.
     return layers.l2_loss(layers.relu(x), fake_y)
Example #4
 def check_gamma(g):
     # Loss as a function of the batchnorm scale parameter only;
     # x, beta and fake_y are captured from the enclosing test scope.
     y, _, _ = layers.batchnorm(x, g, beta)
     return layers.l2_loss(y, fake_y)
Example #5
 def check_fn(w):
     # Loss of an affine layer as a function of its weight w; x, b and fake_y come from the enclosing test scope.
     return layers.l2_loss(layers.affine(x, w, b), fake_y)
Example #6
 def loss(self, predict, y):
     # L2 loss between the model's predictions and the targets.
     return layers.l2_loss(predict, y)
Example #7
 def check_fn(x_0):
     # Loss as a function of the first input x_0; f, x_1 and fake_y come from the enclosing test scope.
     return layers.l2_loss(f(x_0=x_0, x_1=x_1), fake_y)
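All of the check_* closures above follow the same pattern: they wrap a layer call so that the loss becomes a scalar function of a single argument, whose gradient can then be verified with a finite-difference check. Below is a minimal sketch of such a check in plain NumPy; numerical_gradient, the l2_loss stand-in, and the array shapes are illustrative assumptions, not part of the layers library used in the examples above.
 import numpy as np

 def numerical_gradient(fn, param, eps=1e-6):
     # Central-difference estimate of the gradient of a scalar-valued fn at param.
     grad = np.zeros_like(param)
     it = np.nditer(param, flags=['multi_index'])
     while not it.finished:
         idx = it.multi_index
         orig = param[idx]
         param[idx] = orig + eps
         f_plus = fn(param)
         param[idx] = orig - eps
         f_minus = fn(param)
         param[idx] = orig                          # restore the entry
         grad[idx] = (f_plus - f_minus) / (2 * eps)
         it.iternext()
     return grad

 # Hypothetical stand-in for layers.l2_loss (the library's scaling may differ).
 def l2_loss(pred, target):
     return 0.5 * np.sum((pred - target) ** 2)

 x = np.random.randn(4, 3)
 w = np.random.randn(3, 2)
 fake_y = np.random.randn(4, 2)

 def check_fn(w):
     # Same shape as Example #5: the loss of an affine layer as a function of w alone.
     return l2_loss(x.dot(w), fake_y)

 analytic = x.T.dot(x.dot(w) - fake_y)              # closed-form gradient w.r.t. w
 numeric = numerical_gradient(check_fn, w)
 print(np.max(np.abs(analytic - numeric)))          # should be tiny (well below 1e-6)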