def test_reduction_deriv(transformer_factory):
    """Numerically verify the gradient of ng.sum over several axis subsets.

    NOTE(review): max/min are deliberately excluded — when two elements tie
    for the extremum, perturbing one of them changes which element "wins",
    so the finite-difference derivative check would be invalid for them.
    """
    axis_c = ng.make_axis(name='C')
    axis_w = ng.make_axis(name='W')
    axis_h = ng.make_axis(name='H')
    axis_c.length = 4
    axis_w.length = 10
    axis_h.length = 10
    step = .001
    axes = ng.make_axes([axis_c, axis_w, axis_h])
    # Samples spaced 2 * step apart — presumably so the finite-difference
    # perturbation cannot make two values cross; confirm against
    # rng.discrete_uniform's contract.
    u = rng.discrete_uniform(1.0, 2.0, 2 * step, axes)

    for _np_red, be_red, _name in [(np.sum, ng.sum, 'sum')]:
        for reduction_axes in [[axis_c], [axis_w], [axis_h],
                               [axis_c, axis_w], [axis_w, axis_h]]:
            param = ng.placeholder(axes)
            reduced = be_red(param, reduction_axes=reduction_axes)
            check_derivative(reduced, param, step, u, atol=1e-1, rtol=1e-1)
def test_log_sigmoid_deriv(transformer_factory):
    """Numerically verify the gradient of log(sigmoid(x))."""
    axes = ng.make_axes([ng.make_axis(20), ng.make_axis(128)])
    inp = ng.placeholder(axes)
    values = rng.uniform(-3.0, 3.0, inp.axes)
    log_sig = ng.log(ng.sigmoid(inp))
    check_derivative(log_sig, inp, 0.001, values, atol=1e-2, rtol=1e-2)
def test_softmax_deriv(transformer_factory):
    """Numerically verify the gradient of ng.softmax on a (W, N) input."""
    batch = ng.make_axis(name='N', batch=True)
    feat = ng.make_axis(name='W')
    feat.length = 3
    batch.length = 10
    axes = ng.make_axes([feat, batch])
    values = rng.uniform(0, 1, axes)
    inp = ng.placeholder(axes)
    check_derivative(ng.softmax(inp), inp, 0.001, values, atol=1e-2, rtol=1e-2)
def test_print_op_bprop():
    """Ensure bprop of PrintOp is correct (passes through exactly the delta)."""
    axis_a = ng.make_axis(10, name='A')
    param = ng.placeholder(ng.make_axes([axis_a]))
    # random starting point
    start = rng.uniform(-1, 1, param.axes)
    check_derivative(ng.PrintOp(param), param, 0.001, start,
                     atol=1e-3, rtol=1e-3)
def test_reciprocal_derivative(transformer_factory):
    """Numerically verify the gradient of ng.reciprocal.

    Input values are drawn from [0.1, 5.0] — strictly positive and away
    from zero, where 1/x and its derivative stay well conditioned.
    """
    axis_n = ng.make_axis(name='N')
    axis_w = ng.make_axis(name='W')
    axis_w.length = 20
    axis_n.length = 128
    step = .001
    axes = ng.make_axes([axis_w, axis_n])
    inp = ng.placeholder(axes)
    values = rng.uniform(.1, 5.0, inp.axes)
    recip = ng.reciprocal(inp)
    check_derivative(recip, inp, step, values, atol=1e-2, rtol=1e-2)
def test_dimshuffle_bprop(transformer_factory):
    """Transpose (dimshuffle) a 2-d array and check its bprop is correct."""
    axis_a = ng.make_axis(2)
    axis_b = ng.make_axis(3)
    param = ng.placeholder(ng.make_axes([axis_a, axis_b]))
    # random starting point
    start = rng.uniform(-1, 1, param.axes)
    transposed = ng.axes_with_order(param, [axis_b, axis_a])
    check_derivative(transposed, param, 0.001, start, atol=1e-3, rtol=1e-3)
def test_elementwise_binary_ops_matched_args_deriv_rhs(transformer_factory):
    """Gradient w.r.t. the RIGHT operand of each elementwise binary op.

    Both operands share identical axes; the left operand is held fixed as
    a parameter while the derivative is taken w.r.t. the right one.
    """
    axes = ng.make_axes([ng.make_axis(20), ng.make_axis(20)])
    for _np_op, be_op in ELEMENTWISE_BINARY_OPS:
        # Matched sizes
        lhs = ng.placeholder(axes)
        rhs = ng.placeholder(axes)
        lhs_val = rng.uniform(-1.0, 1.0, lhs.axes)
        # rhs values drawn from [1, 2], away from zero — presumably so ops
        # such as division stay well-behaved; confirm against the op list.
        rhs_val = rng.uniform(1.0, 2.0, rhs.axes)
        check_derivative(
            be_op(lhs, rhs), rhs, 0.001, rhs_val,
            parameters=[lhs],
            parameter_values=[lhs_val],
            atol=1e-3, rtol=1e-3,
        )
def test_cross_entropy_softmax_deriv(transformer_factory):
    """Gradient of multiclass cross-entropy on softmax outputs.

    The derivative is taken w.r.t. the logits; the targets are held fixed
    as a parameter.
    """
    batch = ng.make_axis(name='N', batch=True)
    feat = ng.make_axis(name='W')
    feat.length = 3
    batch.length = 10
    axes = ng.make_axes([feat, batch])
    logits = ng.placeholder(axes)
    targets = ng.placeholder(axes)
    logit_vals = rng.uniform(0, 1, axes)
    # Targets are normalized along axis 0 so they form a valid
    # probability distribution over the W axis.
    target_vals = np_softmax(rng.uniform(0, 1, axes), 0)
    check_derivative(
        ng.cross_entropy_multi(ng.softmax(logits), targets),
        logits, 0.001, logit_vals,
        parameters=[targets],
        parameter_values=[target_vals],
        atol=1e-2, rtol=1e-2,
    )