Example #1
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops

def _LeakyReluGradGrad(op, grad):
    # Second-order gradient: re-mask the incoming grad by alpha/1 as usual;
    # the gradient with respect to the features input x is zero.
    x = op.inputs[1]
    alpha = op.get_attr("alpha")
    return (gen_nn_ops.leaky_relu_grad(grad, x, alpha=alpha),
            array_ops.zeros(shape=array_ops.shape(x), dtype=x.dtype))
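This handler fires when a gradient of a LeakyRelu gradient is taken again. A minimal check from the public API side, assuming TensorFlow 2.x eager execution: because the slope of tf.nn.leaky_relu is piecewise constant, the second derivative is zero everywhere it is defined, so nested gradient tapes return zeros.

import tensorflow as tf

x = tf.constant([-2.0, -0.5, 1.0, 3.0])
with tf.GradientTape() as outer:
    outer.watch(x)
    with tf.GradientTape() as inner:
        inner.watch(x)
        y = tf.nn.leaky_relu(x, alpha=0.2)
    dy_dx = inner.gradient(y, x)        # 0.2 where x < 0, 1.0 where x > 0
d2y_dx2 = outer.gradient(dy_dx, x)      # all zeros: the slope is piecewise constant
print(d2y_dx2)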
Example #2
from tensorflow.python.ops import gen_nn_ops

def _LeakyReluGrad(op, grad):
    # Backprop for LeakyRelu: scale grad by 1 where x > 0, by alpha elsewhere.
    x = op.inputs[0]
    alpha = op.get_attr("alpha")
    return gen_nn_ops.leaky_relu_grad(grad, x, alpha=alpha)
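Seen from the public API, this is the first-order gradient of tf.nn.leaky_relu. A minimal sketch, assuming TensorFlow 2.x eager execution, that the gradient is alpha for negative inputs and 1 for positive inputs:

import tensorflow as tf

x = tf.constant([-2.0, -0.5, 1.0, 3.0])
with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.nn.leaky_relu(x, alpha=0.2)
print(tape.gradient(y, x))  # expected: [0.2, 0.2, 1.0, 1.0]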
Example #3
from tensorflow.python.ops import gen_nn_ops

def _LeakyReluGrad(op, grad):
  x = op.inputs[0]
  alpha = op.get_attr("alpha")
  return gen_nn_ops.leaky_relu_grad(grad, x, alpha=alpha)
Example #4
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops

def _LeakyReluGradGrad(op, grad):
  x = op.inputs[1]
  alpha = op.get_attr("alpha")
  return (gen_nn_ops.leaky_relu_grad(grad, x, alpha=alpha),
          array_ops.zeros(shape=array_ops.shape(x), dtype=x.dtype))
Example #5
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops

def _LeakyReluGradGrad(op, grad):
    x = op.inputs[1]
    alpha = op.get_attr("alpha")
    return (gen_nn_ops.leaky_relu_grad(grad, x,
                                       alpha=alpha), array_ops.zeros_like(x))
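All of the examples above delegate to the same generated op wrapper, gen_nn_ops.leaky_relu_grad. A minimal sketch of calling it directly with a unit upstream gradient, assuming the internal gen_nn_ops module is importable in your TensorFlow build:

import tensorflow as tf
from tensorflow.python.ops import gen_nn_ops

x = tf.constant([-1.0, 0.5, 2.0])
upstream = tf.ones_like(x)
# leaky_relu_grad(gradients, features, alpha): upstream scaled by alpha where x < 0
dx = gen_nn_ops.leaky_relu_grad(upstream, x, alpha=0.3)
print(dx)  # expected: [0.3, 1.0, 1.0]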