Example No. 1
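These excerpts appear to be test functions from the ngraph-python project, and they share a common preamble that this listing omits. A plausible reconstruction (the exact module paths are assumptions, not shown in the original):

import os
from contextlib import closing

import numpy as np
import pytest

import ngraph as ng
import ngraph.transformers as ngt
from ngraph.testing import ExecutorFactory, executor
# Saver, scope_ops, SubGraph, ComputationalGraph, data_generator and LABELS
# are also referenced below; their imports are not shown in the listing.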
def compare_optimizer_variable_select(opt_ng, opt_ref):

    # Set up data placeholders
    C = ng.make_axis(20)
    N = ng.make_axis(32, name='N')

    data = ng.placeholder([C, N])
    target = ng.placeholder([N])

    # parameters to be updated by the optimizer under test
    np_W1 = np.random.rand(C.length)
    np_W2 = np.random.rand(C.length)
    W1 = ng.variable([C], initial_value=np_W1)
    W2 = ng.variable([C], initial_value=np_W2)

    # Set up op graph
    cost = ng.sum(target - ng.dot(W1, data) - ng.dot(W2, data), out_axes=())
    updated_weights = ng.sequential([opt_ng(cost, variables=[W1]), W1])

    # Set up the computation and run the "train" loop
    with ExecutorFactory() as ex:
        opt_ng_comp = ex.transformer.computation([updated_weights, W2], data, target)
        mock_dataset = data_generator(20, C.length, N.length)

        for x, y in mock_dataset:
            [ng_W1, ng_W2] = opt_ng_comp(x, y)  # updated weights for ngraph optimizer
            np_W1 = opt_ref(x, np_W1)   # updated weights for reference optimizer

            ng.testing.assert_allclose(np_W1, ng_W1, rtol=1e-3)
            ng.testing.assert_allclose(np_W2, ng_W2, rtol=1e-3)
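The helper data_generator is not shown in the listing. A minimal sketch consistent with how it is called here (hypothetical implementation):

def data_generator(num_iter, C, N):
    # Yield num_iter random (data, target) batches shaped (C, N) and (N,).
    for _ in range(num_iter):
        yield (np.random.rand(C, N).astype(np.float32),
               np.random.rand(N).astype(np.float32))

The opt_ng / opt_ref pair is presumably supplied by the test's parametrization: an ngraph optimizer and its matching NumPy reference update.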
Example No. 2
def test_deriv_missing_connection(N):
    """
    Taking the derivative of an expression with respect to a variable not
    used to compute the expression should raise an exception.
    """
    x = ng.variable([N])
    y = ng.variable([N])
    z = ng.variable([N])

    with pytest.raises(ValueError):
        ng.deriv(x + y, z)
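For contrast (not part of the original test), differentiating with respect to a variable that does feed the expression is well-defined:

ng.deriv(x + y, x)  # fine: x contributes to x + y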
Example No. 3
def test_variable():
    input_axes = ng.make_axes([ng.make_axis(10), ng.make_axis(3)])
    var = ng.variable(axes=input_axes)
    assign_val = np.random.rand(10, 3)
    var_assign = ng.AssignOp(tensor=var, val=assign_val)
    var_seq = ng.sequential([var_assign, var])
    var_comp = ng.computation(var_seq, "all")
    results = dict()
    weight_saver = Saver()
    with closing(ngt.make_transformer()) as transformer:
        var_func = transformer.add_computation(var_comp)
        weight_saver.setup_save(transformer=transformer, computation=var_comp)
        results['saved'] = var_func().copy()
        weight_saver.save(filename="test_variable")

    reassign_val = np.random.rand(10, 3)
    var_reassign = ng.AssignOp(tensor=var, val=reassign_val)

    var_recomp = ng.computation(var_reassign, "all")
    var_read = ng.computation(var, "all")
    with closing(ngt.make_transformer()) as restore_transformer:
        var_recompfunc = restore_transformer.add_computation(var_recomp)
        weight_saver.setup_restore(transformer=restore_transformer,
                                   computation=var_recomp,
                                   filename="test_variable")
        var_readfunc = restore_transformer.add_computation(var_read)
        var_recompfunc()
        results['reassigned'] = var_readfunc().copy()
        weight_saver.restore()
        results['restored'] = var_readfunc().copy()
    os.remove("test_variable.npz")
    assert np.allclose(results['saved'], assign_val, atol=0)
    assert np.allclose(results['reassigned'], reassign_val, atol=0)
    assert np.allclose(results['saved'], results['restored'], atol=0)
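Distilled, the save/restore flow this test exercises (a summary of the calls above, nothing new):

# save:    weight_saver.setup_save(transformer=..., computation=...)
#          weight_saver.save(filename="test_variable")   # writes test_variable.npz
# restore: weight_saver.setup_restore(transformer=..., computation=..., filename=...)
#          weight_saver.restore()                        # weights return to saved values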
Example No. 4
def test_weight_clipping(w_clip, optimizer):
    opt_ng = optimizer(0.1, weight_clip_value=w_clip)

    # Set up data placeholders
    C = ng.make_axis(20)
    N = ng.make_axis(32, name='N')

    data = ng.placeholder([C, N])
    target = ng.placeholder([N])

    # parameters to be updated by the optimizer under test
    # make sure initial values exceed the clip value in magnitude
    np_W = 10 * w_clip * (2 * np.random.rand(C.length) - 1)
    W = ng.variable([C], initial_value=np_W)

    # double check generated initial W value
    assert np.max(np_W) > w_clip
    assert np.min(np_W) < -w_clip

    # Set up op graph
    cost = ng.sum(target - ng.dot(W, data), out_axes=())

    updated_weights = ng.sequential([opt_ng(cost), W])

    epsilon = w_clip * 1e-3
    # Set up the computation and run the "train" loop
    with ExecutorFactory() as ex:
        opt_ng_comp = ex.transformer.computation(updated_weights, data, target)
        mock_dataset = data_generator(20, C.length, N.length)

        for x, y in mock_dataset:
            ng_W = opt_ng_comp(x, y)  # updated weights for ngraph optimizer

            assert np.max(ng_W) < w_clip + epsilon
            assert np.min(ng_W) > -w_clip - epsilon
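For intuition (not in the original test): the bound being asserted is an elementwise clamp, which a NumPy reference update would express as:

w_ref = np.clip(w_ref, -w_clip, w_clip)  # hypothetical reference step after each update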
Example No. 5
def test_variable_init(C):
    w_init = np.random.rand(C.length)
    W = ng.variable(ng.make_axes([C]), initial_value=w_init)

    with ExecutorFactory() as ex:
        result = ex.executor(W)()
    ng.testing.assert_allclose(result, w_init)
Example No. 6
def test_pad_0(N):
    """
    pad with length 0 should be a nop
    """
    x = ng.variable([N])

    assert ng.pad(x, [0]).axes == x.axes
Example No. 7
def test_initial_value():
    # Test work-around for issue #1138
    w = [3, 4, 5]
    x = ng.constant(w)
    y = ng.variable([ng.make_axis(length=len(w))], initial_value=x)
    with ExecutorFactory() as ex:
        result = ex.executor(y)()
    ng.testing.assert_allclose(result, np.asarray(w, dtype=np.float32))
Example No. 8
def test_pad_invalid_paddings_length(N):
    """
    pad should raise an exception if the paddings length is not the same as the
    input dimensionality.
    """
    x = ng.variable([N])
    with pytest.raises(ValueError):
        ng.pad(x, [1, 0])
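For contrast (not part of the original test), one padding entry per axis is well-formed for the 1-D x above:

ng.pad(x, [1])  # valid: one padding spec for the single axis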
Example No. 9
def test_modify_state():
    with ExecutorFactory() as ex:
        N = ng.make_axis(3, name='N')
        x_np = np.ones((N.length)) * 4
        x = ng.variable([N], initial_value=x_np).named('x')
        val = ng.sequential([ng.assign(x, x + x), x])
        f = ex.executor(val)
        x_val = f()
        assert np.allclose(x_np + x_np, x_val)
Example No. 10
def test_sign():
    x_np = np.array([-1.2, 2.3, 0.0, 1.2])
    N = ng.make_axis(len(x_np))
    x = ng.variable([N])
    y = ng.sign(x)
    y_np = np.sign(x_np)
    with ExecutorFactory() as ex:
        y_val = ex.executor(y, x)(x_np)
        assert np.allclose(y_val, y_np)
Example No. 11
def test_sequential(N):
    x = ng.variable([N], initial_value=0)
    x0 = x + x
    x1 = x + x
    p = ng.sequential([x0, ng.assign(x, 2), x1, x0])
    with ExecutorFactory() as ex:
        x0_val, x1_val, p_val = ex.executor([x0, x1, p])()
    assert x0_val == 0  # x0 is evaluated before the assignment
    assert x1_val == 4  # x1 is evaluated after x becomes 2
    assert p_val == 0   # sequential yields its last op, x0, already computed as 0
Example No. 12
def test_read_state():
    """
    This just reads back a tensor. No code is generated.
    """
    with ExecutorFactory() as ex:
        N = ng.make_axis(3, name='N')
        x_np = np.ones((N.length)) * 4
        x = ng.variable([N], initial_value=x_np).named('x')
        f = ex.executor(x)
        x_val = f()
        assert np.allclose(x_np, x_val)
Example No. 13
def test_sequential_side(M):
    x1_np = 2
    x2_np = 3
    b_np = 1
    x_np = np.array([1, 2, 3], dtype=np.float32)

    x = ng.variable([M], initial_value=x_np)
    x1 = ng.persistent_tensor(axes=(), initial_value=x1_np)
    x2 = ng.persistent_tensor(axes=(), initial_value=x2_np)
    x1_vo = ng.value_of(x1)
    x2_vo = ng.value_of(x2)
    b = ng.persistent_tensor(axes=(), initial_value=b_np)

    y = ng.sequential([
        x1_vo, x2_vo,
        ng.assign(x1,
                  ng.sum(x, out_axes=()) + x1 * b + (1 - b)),
        ng.assign(x2,
                  ng.mean(x, out_axes=()) + x2 * b + (1 - b)), x * 2
    ])

    with ExecutorFactory() as ex:
        main_effect = ex.executor((y, x1_vo, x2_vo, x1, x2))
        current_values = ex.executor((x1, x2))

        # Run main path #1
        y_val, x1_init_val, x2_init_val, x1_final_val, x2_final_val = main_effect()
        y_np = x_np * 2

        assert np.allclose(y_val, y_np)
        assert np.allclose(x1_init_val, x1_np)
        assert np.allclose(x2_init_val, x2_np)
        x1_np = np.sum(x_np) + x1_np * b_np + (1 - b_np)
        x2_np = np.mean(x_np) + x2_np * b_np + (1 - b_np)
        assert np.allclose(x1_final_val, x1_np)
        assert np.allclose(x2_final_val, x2_np)

        x1_val, x2_val = current_values()
        assert np.allclose(x1_val, x1_np)
        assert np.allclose(x2_val, x2_np)

        # Run main path #2 (same updates applied again, starting from run 1's state)
        y_val, x1_init_val, x2_init_val, x1_final_val, x2_final_val = main_effect()
        y_np = x_np * 2

        assert np.allclose(y_val, y_np)
        assert np.allclose(x1_init_val, x1_np)
        assert np.allclose(x2_init_val, x2_np)
        x1_np = np.sum(x_np) + x1_np * b_np + (1 - b_np)
        x2_np = np.mean(x_np) + x2_np * b_np + (1 - b_np)
        assert np.allclose(x1_final_val, x1_np)
        assert np.allclose(x2_final_val, x2_np)
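Worked values (x = [1, 2, 3] and b = 1, so x1 * b + (1 - b) reduces to the previous x1, and likewise for x2):

# run 1: x1 = sum(x) + 2 = 6 + 2 = 8     x2 = mean(x) + 3 = 2 + 3 = 5
# run 2: x1 = 6 + 8 = 14                 x2 = 2 + 5 = 7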
Example No. 14
def test_use_state():
    """
    Uses the value of a tensor in a computation.
    """
    with ExecutorFactory() as ex:
        N = ng.make_axis(3, name='N')
        x_np = np.ones((N.length)) * 4
        x = ng.variable([N], initial_value=x_np).named('x')
        xx = x + x
        f = ex.executor(xx)
        xx_val = f()
        assert np.allclose(x_np + x_np, xx_val)
Example No. 15
def test_subgraph_scopes_attribute(input_placeholder):
    """
    Subgraphs should have a scopes attribute that contains scope-subgraph pairs.
    """

    subgraph = SubGraph()
    with scope_ops("scope1", subgraph=subgraph):
        w1 = ng.variable(ng.make_axis(), initial_value=1)
        w1 * input_placeholder

    assert len(subgraph.scopes) == 1
    assert "scope1" in subgraph.scopes
    assert len(subgraph.scopes["scope1"].scopes) == 1

    with scope_ops("scope2", subgraph=subgraph):
        w2 = ng.variable(ng.make_axis(), initial_value=1)
        w2 * input_placeholder

    assert len(subgraph.scopes) == 2
    assert "scope2" in subgraph.scopes
    assert len(subgraph.scopes["scope2"].scopes) == 1
Example No. 16
    def __call__(self, in_obj):
        if not self.initialized:
            w_axis = ng.make_axis()
            self.weight = ng.variable(axes=[w_axis],
                                      initial_value=2,
                                      metadata={"label": LABELS["weight"]},
                                      name="W")
            self.side_effect = ng.persistent_tensor(axes=[w_axis],
                                                    initial_value=0)

        return ng.sequential([ng.assign(self.side_effect, self.weight),
                              self.weight * in_obj])
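This __call__ is an excerpt; the enclosing class is not shown. It presumably defines the initialized flag and the LABELS mapping it references. A minimal skeleton consistent with the excerpt (names hypothetical):

LABELS = {"weight": "weight"}  # hypothetical; only the "weight" key is used above

class ScalarWeightLayer(object):  # hypothetical class name
    def __init__(self):
        self.initialized = False
    # __call__ as above; a full implementation would also set
    # self.initialized = True after creating self.weight.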
Example No. 17
def test_pad_mixed():
    """
    mix 0 padding with non-0 padding
    """
    input_axes = ng.make_axes([ng.make_axis(1), ng.make_axis(1)])
    x = ng.variable(input_axes)

    pad = ng.pad(x, [0, 1])

    assert pad.axes[0].name == x.axes[0].name
    assert pad.axes[1].name == x.axes[1].name
    assert pad.axes[0].length == x.axes[0].length
    assert pad.axes[1].length != x.axes[1].length
Example No. 18
def test_execute_non_placeholder():
    """
    Expect a failure if a non-input (Variable) is used as an argument to
    executor.
    """
    N = ng.make_axis(length=1)

    x = ng.temporary([N])
    y = ng.variable([N])

    with pytest.raises(ValueError):
        with executor(x + y, x, y) as ex:
            ex
Example No. 19
def test_computational_graph_capture(input_placeholder):
    """
    ComputationalGraph object should capture all ops created after it is instantiated.
    """

    cg = ComputationalGraph()
    w = ng.variable(ng.make_axis(), initial_value=1)
    z = w * input_placeholder

    for op in [w, z]:
        assert op in cg.ops

    assert input_placeholder not in cg.ops
    assert w.name in cg.variables
Example No. 20
def test_slice_nop():
    """
    slicing an axis shouldn't change the name
    """
    input_axes = ng.make_axes([ng.make_axis(1), ng.make_axis(1)])
    x = ng.variable(input_axes)

    s = ng.tensor_slice(x, [
        slice(None, None, None),
        slice(None, None, 1),
    ])

    assert s.axes[0] == x.axes[0]
    assert s.axes[1] == x.axes[1]
Example No. 21
def test_sequential_reduce(M):
    x = ng.variable([M], initial_value=1)
    x0 = x + x
    x1 = ng.sum(x0, out_axes=())
    x2 = ng.sum(x0, out_axes=()) + x0
    p = ng.sequential([x0, x1, x2])

    with ExecutorFactory() as ex:
        x0_val, x1_val, x2_val, p_val, x_val = ex.executor([x0, x1, x2, p, x])()
        x0_np = x_val + x_val
        x1_np = np.sum(x0_np)
        x2_np = x1_np + x0_np
        assert np.allclose(x0_val, x0_np)
        assert np.allclose(x1_val, x1_np)
        assert np.allclose(x2_val, x2_np)
        assert np.allclose(p_val, x2_np)
Example No. 22
def ngraph_l2_norm(np_array):
    """
    TODO.

    Arguments:
      np_array: TODO

    Returns:
      TODO
    """
    axes = ()
    for i, l in enumerate(np_array.shape):
        axes |= (ng.make_axis(length=l).named('axis%s' % i),)

    np_tensor = ng.constant(np_array, axes)
    var = ng.variable(axes, initial_value=np_tensor)
    with executor(ng.sqrt(ng.squared_L2(var))) as ex:
        return ex()
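A quick sanity check of the helper (expected behavior, not taken from the listing):

assert np.isclose(ngraph_l2_norm(np.array([3.0, 4.0])), 5.0)  # sqrt(9 + 16) == 5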
Example No. 23
def test_idempotent_axes_c():
    """
    Test axes transformations with autodiff, case c: with broadcast,
    slice, cast, and dim-shuffle.
    """
    with ExecutorFactory() as ex:
        axes = ng.make_axes([ng.make_axis(3), ng.make_axis(1)])
        result_axes = [ng.make_axis(length=axis.length) for axis in axes]

        # variable
        w = ng.variable(axes, initial_value=np.ones((3, 1)))

        # broadcast l / r, introducing dummy length 1 axes
        l = ng.broadcast(w, axes)
        r = ng.broadcast(w, axes)

        # slice
        axes_slice = [slice(None, None, None), slice(None, None, None)]
        l_sliced = ng.tensor_slice(l, axes_slice)
        r_sliced = ng.tensor_slice(r, axes_slice)

        # cast r
        r_sliced_casted = ng.cast_axes(r_sliced, axes)

        # perform add
        result = ng.add(l_sliced, r_sliced_casted)

        # cast / dimshuffle
        result = ng.cast_axes(result, result_axes)
        result = ng.axes_with_order(result, result_axes)

        # cost and grad
        cost = ng.sum(result, reduction_axes=result.axes)
        grad = ng.deriv(cost, w)

        grad_comp = ex.executor(grad)
        cost_comp = ex.executor(cost)

        cost_comp_ng = cost_comp()
        grad_comp_ng = grad_comp()
        grad_comp_np = np.ones((3, 1)) * 2.
        assert cost_comp_ng == 6.0
        assert np.array_equal(grad_comp_ng, grad_comp_np)
Example No. 24
def test_mode_setting(input_placeholder):
    """
    Modes can be specified when scoping ops. The `modes` attribute should return all ops
    in the subgraph + any on which they depend.
    """
    w = ng.variable(ng.make_axis(), initial_value=1, name="W")
    with scope_ops(name="mode_scope", mode="test") as subgraph:
        w * input_placeholder

    for op in subgraph:
        if not isinstance(op, ng.TensorValueOp):
            assert "mode" in op.metadata
            assert op.metadata["mode"] == "test"

    assert "test" in subgraph.modes
    assert w not in subgraph.ops
    # Make sure all ops in subgraph are in mode
    for op in [w] + subgraph.ops:
        assert op in subgraph.modes["test"].ops
Example No. 25
def test_logreg():
    # xs: (C, N), y: (N,)
    xs = np.array([[0.52, 0.88, 0.52, 0.74], [1.12, -1.08, 0.06, -2.49],
                   [0.77, 0.15, -1.3, 1.39]])
    ys = np.array([1, 1, 0, 1])
    max_iter = 10
    alpha = 0.1
    thetas = np.array([0., 0., 0.])

    np_logreg = NumpyLogreg(xs, ys, thetas)

    C, N = ng.make_axis(length=3), ng.make_axis(length=4)

    # input tensors
    xs_v = ng.placeholder((C, N))
    ys_v = ng.placeholder([N])
    alpha_v = ng.placeholder(())
    thetas_var = ng.variable([C], initial_value=thetas)

    # define ops
    ys_pred = ng.sigmoid(ng.dot(thetas_var, xs_v))
    log_likelihoods = ng.log(ys_pred) * ys_v + ng.log(1 - ys_pred) * (1 - ys_v)
    loss = -ng.sum(log_likelihoods, reduction_axes=[N])
    grad_comp = ng.deriv(loss, thetas_var)
    weight_update = ng.sequential(
        [ng.assign(thetas_var, thetas_var - alpha_v * grad_comp), thetas_var])

    # transformer
    with ExecutorFactory() as ex:
        train_eval_func = ex.executor([grad_comp, loss, weight_update], xs_v,
                                      ys_v, alpha_v)

        # evaluate
        for i in range(max_iter):
            grad_np, loss_np, thetas_np = np_logreg.optimize(alpha)
            grad_ng, loss_ng, thetas_ng = train_eval_func(xs, ys, alpha)
            ng.testing.assert_allclose(loss_np, loss_ng, rtol=1e-05, atol=1e-05,
                                       transformer_overwrite=False)
            ng.testing.assert_allclose(grad_np, grad_ng, rtol=1e-05, atol=1e-05,
                                       transformer_overwrite=False)
            ng.testing.assert_allclose(thetas_np, thetas_ng, rtol=1e-05, atol=1e-05,
                                       transformer_overwrite=False)
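NumpyLogreg is the NumPy reference the test compares against; it is not shown in the listing. A sketch consistent with how it is called here (the class layout is an assumption):

class NumpyLogreg(object):
    # Plain-NumPy logistic regression mirroring the ngraph ops above.
    def __init__(self, xs, ys, thetas):
        self.xs, self.ys, self.thetas = xs, ys, thetas

    def optimize(self, alpha):
        ys_pred = 1.0 / (1.0 + np.exp(-np.dot(self.thetas, self.xs)))
        log_likelihoods = (np.log(ys_pred) * self.ys +
                           np.log(1 - ys_pred) * (1 - self.ys))
        loss = -np.sum(log_likelihoods)
        grad = np.dot(self.xs, ys_pred - self.ys)  # d(loss)/d(thetas)
        self.thetas = self.thetas - alpha * grad
        return grad, loss, self.thetas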
Example No. 26
def test_idempotent_axes_b():
    """
    Test axes transformations with autodiff, case b: broadcast applied
    twice to the same tensor.
    """
    with ExecutorFactory() as ex:
        axes = ng.make_axes([ng.make_axis(3), ng.make_axis(1)])

        w = ng.variable(axes, initial_value=np.ones((3, 1)))
        l = ng.broadcast(w, axes)
        r = ng.broadcast(w, axes)
        result = ng.add(l, r)

        result = ng.cast_axes(result, axes)
        cost = ng.sum(result, reduction_axes=axes)
        grad = ng.deriv(cost, w)

        grad_comp = ex.executor(grad)
        cost_comp = ex.executor(cost)

        assert cost_comp() == 6.0
        assert np.array_equal(grad_comp(), np.ones((3, 1)) * 2.)
Example No. 27
def test_idempotent_axes_a():
    """
    Test axes transformations with autodiff, case a (reference case).
    """
    with ExecutorFactory() as ex:
        axes = ng.make_axes([ng.make_axis(3), ng.make_axis(1)])

        w = ng.variable(axes, initial_value=np.ones((3, 1)))
        result = w + w

        result = ng.cast_axes(result, axes)
        cost = ng.sum(result, reduction_axes=axes)
        grad = ng.deriv(cost, w)

        grad_comp = ex.executor(grad)
        cost_comp = ex.executor(cost)

        cost_comp_val = cost_comp()
        grad_comp_val = grad_comp()
        grad_comp_np = np.ones((3, 1)) * 2.

        assert cost_comp_val == 6.0
        assert np.array_equal(grad_comp_val, grad_comp_np)
Example No. 28
def test_scope_ops(input_placeholder):
    """
    Test scope_ops creates a subgraph with correct attributes
    """

    with scope_ops(name="foo") as subgraph:
        w = ng.variable(ng.make_axis(), initial_value=1, name="W")
        y = w * input_placeholder
        z = y + 4
        v1 = ng.persistent_tensor(w.axes, initial_value=0, name="effect1")
        v2 = ng.persistent_tensor(w.axes, initial_value=0, name="effect2")
        ng.sequential([ng.assign(v1, w), ng.assign(v2, w), z.named("output")])

    assert len(subgraph.inputs) == 1
    assert input_placeholder.unscoped_name in subgraph.inputs

    assert len(subgraph.variables) == 1
    assert "W" in subgraph.variables

    assert len(subgraph.outputs) == 1
    assert "output" in subgraph.outputs

    assert len(subgraph.side_effects) == 2
Example No. 29
def test_one():
    # Test that the caching of the constant one used in DerivOp works.
    op = ng.variable([])
    one_0 = op.one
    one_1 = op.one
    assert one_0 is one_1