def check_beta(b):
    """Gradient-check helper: L2 loss of the batchnorm output w.r.t. beta.

    The original line fused an assignment and a ``return`` onto one line,
    which is a Python syntax error; the two statements must be separated.
    Reads ``x``, ``gamma``, ``fake_y`` from the enclosing scope.
    """
    # batchnorm returns (output, running_mean, running_var); only the output matters here
    y, _, _ = layers.batchnorm(x, gamma, b)
    return layers.l2_loss(y, fake_y)
def check_fn(x):
    """Gradient-check helper: L2 loss of ``f`` applied to ``x`` against ``fake_y``.

    Fix: PEP 8 forbids spaces around ``=`` in keyword arguments
    (``x = x`` -> ``x=x``). Reads ``f``, ``weights``, ``fake_y`` from the
    enclosing scope.
    """
    return layers.l2_loss(f(x=x, fc_weight=weights), fake_y)
def check_fn(x):
    """Gradient-check helper: L2 loss of relu(x) against ``fake_y``."""
    activated = layers.relu(x)
    return layers.l2_loss(activated, fake_y)
def check_gamma(g):
    """Gradient-check helper: L2 loss of the batchnorm output w.r.t. gamma.

    The original line fused an assignment and a ``return`` onto one line,
    which is a Python syntax error; the two statements must be separated.
    Reads ``x``, ``beta``, ``fake_y`` from the enclosing scope.
    """
    # batchnorm returns (output, running_mean, running_var); only the output matters here
    y, _, _ = layers.batchnorm(x, g, beta)
    return layers.l2_loss(y, fake_y)
def check_fn(w):
    """Gradient-check helper: L2 loss of the affine output w.r.t. the weight."""
    projected = layers.affine(x, w, b)
    return layers.l2_loss(projected, fake_y)
def loss(self, predict, y):
    """Return the L2 loss between the prediction and the target ``y``."""
    return layers.l2_loss(predict, y)
def check_fn(x_0):
    """Gradient-check helper: L2 loss of ``f(x_0, x_1)`` against ``fake_y``."""
    prediction = f(x_0=x_0, x_1=x_1)
    return layers.l2_loss(prediction, fake_y)