Example #1
def test_lut(lut_args):
    """
    test lut fprop and bprop
    """
    pad_idx = 0
    with ExecutorFactory() as ex:

        vocab_size, embed_dim, bsz, seq_len, mem_size = lut_args

        V = ng.make_axis(vocab_size)
        F = ng.make_axis(embed_dim)
        M = ng.make_axis(mem_size)

        ax.N.length = bsz
        ax.REC.length = seq_len

        # Multi-axis input to LUT
        ax_idx = ng.make_axes([M, ax.REC, ax.N])
        ax_lut = ng.make_axes([V, F])

        lut = ng.placeholder(ax_lut)
        idx = ng.placeholder(ax_idx)
        idx_flat = ng.flatten(idx)
        ax_out = idx_flat.axes | ng.make_axes([F])

        # fprop
        lut_out_ng = ng.lookuptable(lut, idx_flat, ax_out, pad_idx=pad_idx)
        fprop_fun = ex.executor(lut_out_ng, lut, idx)

        # bprop
        update_error = ng.placeholder(ax_out)
        update_out_ng = lookuptable_update(update_error, lut, idx, lut_out_ng)
        update_fun = ex.executor(update_out_ng, update_error, lut, idx)

        # provide actual inputs and execute the graph
        lut_value = rng.uniform(-1, 1, lut.axes)
        idx_value = rng.random_integers(0, vocab_size - 1, idx.axes)
        fprop_lut = fprop_fun(lut_value, idx_value).copy()

        # compare fprop
        fprop_ref = lut_fprop_ref(lut_value, idx_value)
        ng.testing.assert_allclose(fprop_lut, fprop_ref, rtol=0.0, atol=1.0e-5)

        # provide actual delta and execute the update op
        update_value = rng.uniform(-1, 1, update_error.axes)
        update_lut = update_fun(update_value, lut_value, idx_value).copy()

        # compare bprop (update)
        update_ref = lut_update_ref(
            update_value,
            lut_value,
            idx_value,
            pad_idx=pad_idx)
        ng.testing.assert_allclose(
            update_lut, update_ref, rtol=0.0, atol=1.0e-5)
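These snippets are drawn from the ngraph-python test suite and assume a shared test harness: ng, np, a module-level rng, ExecutorFactory/executor, and reference helpers such as lut_fprop_ref. A minimal sketch of that assumed setup (import paths may differ across ngraph versions):

import numpy as np
import ngraph as ng
from ngraph.testing import ExecutorFactory, executor, RandomTensorGenerator

# module-level random generator; the examples below reference it as `rng`
rng = RandomTensorGenerator()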
Example #2
def make_weights(
        input_placeholder,
        hidden_size,
        weight_initializer,
        bias_initializer,
        init_state=False):
    gates = ['i', 'f', 'o', 'g']

    # input axis + any extra axes of length 1
    in_feature_axes = tuple(input_placeholder.axes)[:-2]
    out_feature_axes = ng.make_axes([ng.make_axis(hidden_size)])
    batch_axis = input_placeholder.axes.batch_axis()
    hidden_axis = ng.make_axis(hidden_size)

    w_in_axes = ng.make_axes([hidden_axis]) + in_feature_axes
    w_rec_axes = ng.make_axes([hidden_axis]) + out_feature_axes

    W_in = {gate: weight_initializer(w_in_axes) for gate in gates}
    W_rec = {gate: weight_initializer(w_rec_axes) for gate in gates}
    b = {gate: bias_initializer(hidden_axis) for gate in gates}

    if init_state is True:
        ax_s = ng.make_axes([hidden_axis, batch_axis])
        init_state = {name: ng.placeholder(ax_s) for name in ['h', 'c']}
        init_state_value = {
            name: rng.uniform(-1, 1, ax_s) for name in ['h', 'c']}
    else:
        init_state = None
        init_state_value = None

    return W_in, W_rec, b, init_state, init_state_value
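A hedged sketch of calling make_weights (assuming the setup sketched under Example #1; the sizes and the init helper are invented for illustration):

def init(axes_):
    # wrap a bare Axis so rng.uniform always receives an Axes
    if not isinstance(axes_, ng.Axes):
        axes_ = ng.make_axes([axes_])
    return rng.uniform(-1, 1, axes_)

F = ng.make_axis(length=8)              # input feature axis
R = ng.make_axis(length=5, name='REC')  # recurrent axis
N = ng.make_axis(length=4, name='N')    # batch axis ('N' is treated as batch)
inp = ng.placeholder(ng.make_axes([F, R, N]))
W_in, W_rec, b, state, state_val = make_weights(inp, 16, init, init, init_state=True)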
Example #3
def test_distributed_graph_plus_two(transformer_factory):
    H = ng.make_axis(length=4, name='height')
    W = ng.make_axis(length=6, name='width')
    x = ng.placeholder(axes=[H, W])
    with ng.metadata(device_id=('1', '2'), parallel=W):
        x_plus_one = x + 1
    x_plus_two = x_plus_one + 1

    np_x = np.random.randint(100, size=[H.length, W.length])
    with ExecutorFactory() as ex:
        computation = ex.executor(x_plus_two, x)
        res = computation(np_x)
        np.testing.assert_array_equal(res, np_x + 2)
Example #4
def test_init_gaussian():
    N = 128
    C = 4
    # XS, YS = g.gen_data(N, 10)

    X = ng.placeholder(axes=ng.Axes([C, N]))
    Y = ng.placeholder(axes=ng.Axes([N]))
    alpha = ng.placeholder(axes=ng.Axes())

    #    W = ng.Variable(axes=ng.Axes([C]), initial_value=ng.fill(sequential))
    W = ng.Variable(axes=ng.Axes([C]), initial_value=10)

    L = W + 1 + alpha

    transformer = ng.NumPyTransformer()
    update_fun = transformer.computation([L, W], alpha, X, Y)

    xs = np.zeros((C, N), dtype=np.float32)
    ys = np.zeros((N, ), dtype=np.float32)
    loss_val, w_val = update_fun(5.0, xs, ys)

    print(loss_val)
Example #5
def test_tensor_slice():
    """
    slicing a tensor should work like numpy
    """
    input_axes = ng.make_axes(
        [ng.make_axis(10), ng.make_axis(20),
         ng.make_axis(5)])

    x = ng.placeholder(axes=input_axes)

    assert x[:5].axes.full_lengths == (5, 20, 5)
    assert x[:, 2:7].axes.full_lengths == (10, 5, 5)
    assert x[:5, :, :-1].axes.full_lengths == (5, 20, 4)
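The assertions above only check axes; a hedged sketch of actually evaluating a slice at the end of test_tensor_slice (reusing x and the setup assumed under Example #1):

    x_np = np.arange(10 * 20 * 5, dtype=np.float32).reshape(10, 20, 5)
    with executor(x[:5, :, :-1], x) as ex:
        # the graph slice should match the equivalent numpy indexing
        np.testing.assert_array_equal(ex(x_np), x_np[:5, :, :-1])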
Example #6
def test_cross_entropy_rec(transformer_factory):
    N = ng.make_axis(name='N', batch=True)
    W = ng.make_axis(name='W')
    T = ng.make_axis(name='T', recurrent=True)

    W.length = 3
    T.length = 4
    N.length = 10
    axes = ng.make_axes([W, T, N])

    p_x = ng.placeholder(axes)
    p_t = ng.placeholder(axes)

    cross_entropy_sm_x_t = ng.cross_entropy_multi(ng.softmax(p_x), p_t)

    x = rng.uniform(0, 1, axes)
    t = np_softmax(rng.uniform(0, 1, axes), 0)

    def f_np(x, t):
        return np_cross_entropy_multi(np_softmax(x, 0), t, axis=0)

    compare_f_at_x(cross_entropy_sm_x_t, [p_x, p_t], f_np, [x, t], rtol=1e-5)
Example #7
def test_softmax_rec_deriv(transformer_factory):
    N = ng.make_axis(name='N', batch=True)
    W = ng.make_axis(name='W')
    T = ng.make_axis(name='T', recurrent=True)

    W.length = 3
    T.length = 4
    N.length = 10
    axes = ng.make_axes([W, T, N])

    x = rng.uniform(0, 1, axes)
    p_x = ng.placeholder(axes)
    check_derivative(ng.softmax(p_x), p_x, 0.001, x, atol=1e-2, rtol=1e-2)
Example #8
def test_softmax_rec(transformer_factory):
    N = ng.make_axis(name='N', batch=True)
    W = ng.make_axis(name='W')
    T = ng.make_axis(name='T', recurrent=True)

    W.length = 3
    T.length = 4
    N.length = 10
    axes = ng.make_axes([W, T, N])

    x = rng.uniform(0, 1, axes)
    p_x = ng.placeholder(axes)
    compare_f_at_x(ng.softmax(p_x), p_x, lambda x: np_softmax(x, 0), x, rtol=1e-5)
Example #9
def test_cross_entropy_softmax_deriv(transformer_factory):
    N = ng.make_axis(name='N', batch=True)
    W = ng.make_axis(name='W')

    W.length = 3
    N.length = 10
    axes = ng.make_axes([W, N])

    p_x = ng.placeholder(axes)
    p_t = ng.placeholder(axes)

    x = rng.uniform(0, 1, axes)
    t = np_softmax(rng.uniform(0, 1, axes), 0)

    check_derivative(ng.cross_entropy_multi(ng.softmax(p_x), p_t),
                     p_x,
                     0.001,
                     x,
                     parameters=[p_t],
                     parameter_values=[t],
                     atol=1e-2,
                     rtol=1e-2)
Example #10
def test_convolution_backprop(transformer_factory, n128_hw32_c3_2x2):
    """
    test convolution backprop path
    """
    cf = ConvParams(**n128_hw32_c3_2x2)
    inputs = ng.placeholder(axes=cf.ax_i)
    filters = ng.placeholder(axes=cf.ax_f)

    # randomly initialize
    input_value = rng.uniform(-1, 1, cf.ax_i)
    filter_value = rng.uniform(-1, 1, cf.ax_f)

    output = ng.sum(ng.convolution(cf.conv_params, inputs, filters, cf.ax_o),
                    out_axes=())

    with ExecutorFactory() as factory:
        dcdf_sym_fun = factory.derivative(output, filters, inputs)
        dcdf_num_fun = factory.numeric_derivative(output, filters, .01, inputs)
        dcdf_sym_val = dcdf_sym_fun(filter_value, input_value)
        dcdf_num_val = dcdf_num_fun(filter_value, input_value)

        ng.testing.assert_allclose(dcdf_sym_val, dcdf_num_val, rtol=0.01)
Example #11
def test_logreg(transformer_factory):
    # xs: (C, N), y: (N,)
    xs = np.array([[0.52, 0.88, 0.52, 0.74], [1.12, -1.08, 0.06, -2.49],
                   [0.77, 0.15, -1.3, 1.39]])
    ys = np.array([1, 1, 0, 1])
    max_iter = 10
    alpha = 0.1
    thetas = np.array([0., 0., 0.])

    np_logreg = NumpyLogreg(xs, ys, thetas)

    C, N = ng.make_axis(length=3), ng.make_axis(length=4)

    # input tensors
    xs_v = ng.placeholder((C, N))
    ys_v = ng.placeholder([N])
    alpha_v = ng.placeholder(())
    thetas_var = ng.variable([C], initial_value=thetas)

    # define ops
    ys_pred = ng.sigmoid(ng.dot(thetas_var, xs_v))
    log_likelihoods = ng.log(ys_pred) * ys_v + ng.log(1 - ys_pred) * (1 - ys_v)
    loss = -ng.sum(log_likelihoods, reduction_axes=[N])
    grad_comp = ng.deriv(loss, thetas_var)
    weight_update = ng.sequential(
        [ng.assign(thetas_var, thetas_var - alpha_v * grad_comp), thetas_var])

    # transformer
    with ExecutorFactory() as ex:
        train_eval_func = ex.executor([grad_comp, loss, weight_update], xs_v,
                                      ys_v, alpha_v)

        # evaluate
        for i in range(max_iter):
            grad_np, loss_np, thetas_np = np_logreg.optimize(alpha)
            grad_ng, loss_ng, thetas_ng = train_eval_func(xs, ys, alpha)
            ng.testing.assert_allclose(loss_np, loss_ng)
            ng.testing.assert_allclose(grad_np, grad_ng)
            ng.testing.assert_allclose(thetas_np, thetas_ng)
Example #12
def test_fill_slice(transformer_factory):
    axes = ng.make_axes([ng.make_axis(length=2), ng.make_axis(length=8)])
    a = ng.placeholder(axes=axes)
    b = ng.sequential([ng.fill(a[:, 1], 0), ng.value_of(a)])

    with ExecutorFactory() as ex:
        func = ex.executor(b, a)
        baseline = func(
            np.array([[1, 2, 3, 4, 5, 6, 7, 8], [8, 7, 6, 5, 4, 3, 2, 1]],
                     dtype=np.float32))
        expected = np.array([[1, 0, 3, 4, 5, 6, 7, 8],
                             [8, 0, 6, 5, 4, 3, 2, 1]])
        ng.testing.assert_allclose(baseline, expected)
Example #13
def test_cross_entropy_rec(transformer_factory, recurrent_input_tensor):
    p_x = recurrent_input_tensor
    p_t = ng.placeholder(p_x.axes)

    cross_entropy_sm_x_t = ng.cross_entropy_multi(ng.softmax(p_x), p_t)

    x = rng.uniform(0, 1, p_x.axes)
    t = np_softmax(rng.uniform(0, 1, p_t.axes), 0)

    def f_np(x, t):
        return np_cross_entropy_multi(np_softmax(x, 0), t, axis=0)

    compare_f_at_x(cross_entropy_sm_x_t, [p_x, p_t], f_np, [x, t], rtol=1e-5)
Example #14
def test_conv(transformer_factory, n64_hw32_c32_3x3):
    cf = ConvParams(**n64_hw32_c32_3x3)

    # randomly initialize
    input_value = rng.uniform(-0.5, 0.5, cf.ax_i)
    filter_value = rng.uniform(-0.5, 0.5, cf.ax_f)
    error_value = rng.uniform(-0.5, 0.5, cf.ax_o)

    inputs = ng.placeholder(cf.ax_i)
    filters = ng.placeholder(cf.ax_f)
    errors = ng.placeholder(cf.ax_o)

    output = ng.convolution(cf.conv_params, inputs, filters, axes=cf.ax_o)
    bprop_out = bprop_conv(errors, inputs, filters, output)
    update_out = update_conv(errors, inputs, filters, output)

    with executor([output, bprop_out, update_out], inputs, filters,
                  errors) as conv_executor:
        result_ng, gradI_ng, gradF_ng = conv_executor(input_value,
                                                      filter_value,
                                                      error_value)

    # Compute reference with NumPy
    result_np, gradI_np, gradF_np = reference_conv(cf.dimI, cf.dimF, cf.dimO,
                                                   cf.conv_params, input_value,
                                                   filter_value, error_value)

    # Compare fprop
    assert np.allclose(result_ng, result_np, rtol=0, atol=0.5)

    # Compare bprop
    assert np.allclose(gradI_ng, gradI_np, rtol=0, atol=0.5)

    # Compare update
    assert np.allclose(gradF_ng, gradF_np, rtol=0, atol=2)
Example #15
def test_cross_entropy_softmax_rec_deriv(transformer_factory, recurrent_input_tensor):
    p_x = recurrent_input_tensor
    p_t = ng.placeholder(p_x.axes)

    x = rng.uniform(0, 1, p_x.axes)
    t = np_softmax(rng.uniform(0, 1, p_t.axes), 0)

    check_derivative(
        ng.cross_entropy_multi(ng.softmax(p_x), p_t),
        p_x, 0.001, x,
        parameters=[p_t],
        parameter_values=[t],
        atol=1e-2, rtol=1e-2
    )
Example #16
def test_cross_entropy_binary_logistic_shortcut(input_tensor):
    """TODO."""
    p_u = input_tensor
    p_v = ng.placeholder(p_u.axes)
    u = rng.uniform(-3.0, 3.0, p_u.axes)
    v = np_softmax(rng.uniform(-3.0, 3.0, p_u.axes), 0)

    cel = cross_entropy_binary_logistic(u, v)
    cel_shortcut = cross_entropy_binary_logistic_shortcut(u, v)
    ng.testing.assert_allclose(cel, cel_shortcut, rtol=1e-5)

    with executor(ng.cross_entropy_binary_inner(ng.sigmoid(p_u), p_v), p_u, p_v) as ex:
        cel_graph = ex(u, v)
    ng.testing.assert_allclose(cel, cel_graph, rtol=1e-5)
Example #17
def test_assign(transformer_factory, operands, test_name):
    v = ng.variable(())
    ng_placeholder = ng.placeholder(())
    vset = ng.sequential([ng.assign(v, ng_placeholder), v])
    multiple_operands = len(operands) != 1
    with executor(vset, ng_placeholder) as ex:
        for inp, expected in operands:
            flex_result = ex(inp)
            print("flex: ", flex_result)
            print("expected: ", expected)
            if multiple_operands:
                assert_allclose(flex_result, expected)
            else:
                assert flex_result == expected
Example #18
def test_wrong_op_name():
    """
    test that an unsupported pooling op raises a ValueError
    """
    ax_i = ng.make_axes([ax.C, ax.D, ax.H, ax.W, ax.N])
    inputs = ng.placeholder(axes=ax_i)
    pooltype = 'min'
    pool_params = dict(op=pooltype)

    with pytest.raises(ValueError) as exinfo:
        ng.pooling(pool_params, inputs, {})

    assert str(exinfo.value) == "Unsupported pooling type: {pooltype}.  Only max and avg " \
        "pooling currently supported. ".format(pooltype=pooltype)
Example #19
def test_first_axes_not_same():
    """
    test that mismatched first axes of input and filter raise a ValueError
    """
    padding = dict(pad_d=0, pad_h=0, pad_w=0)
    strides = dict(str_d=1, str_h=1, str_w=1)
    dilation = dict(dil_d=1, dil_h=1, dil_w=1)
    conv_params = padding.copy()
    conv_params.update(strides)
    conv_params.update(dilation)

    ax_i = ng.make_axes([ax.D, ax.C, ax.H, ax.W, ax.N])
    ax_f = ng.make_axes([ax.C, ax.T, ax.R, ax.S, ax.K])

    inputs = ng.placeholder(ax_i)
    filters = ng.placeholder(ax_f)

    with pytest.raises(ValueError) as exinfo:
        ng.convolution(conv_params, inputs, filters, {})
    assert str(exinfo.value) == 'the first axis in input {inputs} and filter {filters} ' \
        'are not the same.'.format(
            inputs=inputs.axes[0],
            filters=filters.axes[0])
Example #20
    def make_placeholders(self):
        batch_axis = ng.make_axis(length=self.batch_size, name="N")
        time_axis = ng.make_axis(length=self.time_steps, name="REC")
        feature_axis = ng.make_axis(length=self.nfeatures, name="feature_axis")

        placeholders = {}
        for k in self.data_arrays.keys():
            if k in ('inp_txt', 'teacher_tgt'):
                p_axes = ng.make_axes([batch_axis, time_axis, feature_axis])
            else:
                p_axes = ng.make_axes([batch_axis, time_axis])
            placeholders[k] = ng.placeholder(p_axes)

        return placeholders
Example #21
def test_wrong_input_shape_length():
    """
    test wrong input shape length
    """
    pf = PoolParams()

    ax_i = pf.ax_i[:-1]
    inputs = ng.placeholder(axes=ax_i)

    with pytest.raises(ValueError) as exinfo:
        ng.pooling(pf.pool_params, inputs, {})

    assert str(exinfo.value) == 'pooling input shape must be length 5, found {}' \
        .format(len(ax_i))
Example #22
def test_tensor_size(transformer_factory):
    n, m = 3, 4

    N = ng.make_axis(length=n)
    M = ng.make_axis(length=m)

    aaxes = ng.make_axes([N, M])
    x = ng.placeholder(aaxes)

    size_fun = ng.tensor_size(x)
    nptensor = np.arange(n * m).reshape(n, m)

    with executor(size_fun, x) as ex:
        assert ex(nptensor) == n * m
Example #23
def make_placeholder(input_size, sequence_length, batch_size, extra_axes=0):

    input_axis = ng.make_axis()
    recurrent_axis = ng.make_axis(name='R')
    batch_axis = ng.make_axis(name='N')

    input_axes = ng.make_axes([input_axis, recurrent_axis, batch_axis])
    input_axes.set_shape((input_size, sequence_length, batch_size))
    input_axes = ng.make_axes([ng.make_axis(length=1) for _ in range(extra_axes)]) + input_axes

    input_placeholder = ng.placeholder(input_axes)
    input_value = rng.uniform(-0.01, 0.01, input_axes)

    return input_placeholder, input_value
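A hypothetical usage of make_placeholder above (sizes invented for illustration; it relies on the module-level rng from the setup under Example #1):

input_placeholder, input_value = make_placeholder(
    input_size=8, sequence_length=5, batch_size=4, extra_axes=1)
assert input_placeholder.axes.lengths == (1, 8, 5, 4)  # extra axis is prepended
assert input_value.shape == (1, 8, 5, 4)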
Example #24
def get_placeholder_from_operand(operand, axes=None):
    if not isinstance(axes, ng.Axes):
        if not isinstance(operand, np.ndarray):
            axes = ()
        else:
            if len(operand.shape) > 1:
                rows, columns = operand.shape
                N = ng.make_axis(length=rows)
                M = ng.make_axis(length=columns)
                axes = ng.make_axes([N, M])
            else:
                out_axis = ng.make_axis(length=operand.size)
                axes = ng.make_axes([out_axis])
    return ng.placeholder(axes), axes
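A hypothetical sketch exercising the helper above on each operand kind:

ph_2d, axes_2d = get_placeholder_from_operand(np.ones((3, 4)))
assert axes_2d.lengths == (3, 4)

ph_1d, axes_1d = get_placeholder_from_operand(np.ones(6))
assert axes_1d.lengths == (6,)

# a non-ndarray operand falls through to a scalar placeholder
ph_s, axes_s = get_placeholder_from_operand(2.5)
assert axes_s == ()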
Example #25
def test_update_comm_deps_scatter_gather():
    ax_a = ng.make_axis(length=10, name='A')
    ax_b = ng.make_axis(length=15, name='B')
    axes = ng.make_axes([ax_a, ax_b])

    parallel_metadata = dict(parallel=ax_a, device_id=(0, 1),
                             transformer=None, host_transformer=None, device=None)
    with ng.metadata(transformer='cpu0'):
        with ng.metadata(**parallel_metadata):
            from_node_a = ng.placeholder(axes)
            to_node_a = ng.placeholder(axes)
        scatter_send_x = ScatterSendOp(from_node=from_node_a, to_node=to_node_a)
        scatter_recv_a = ScatterRecvOp(to_node=to_node_a, send_node=scatter_send_x)
        with ng.metadata(**parallel_metadata):
            x_plus_one_a = scatter_recv_a + 1
        gather_send_x_plus_one_a = GatherSendOp(from_node=x_plus_one_a)

    with ng.metadata(transformer='cpu1'):
        with ng.metadata(**parallel_metadata):
            to_node_b = ng.placeholder(axes)
        scatter_recv_b = ScatterRecvOp(to_node=to_node_b, send_node=scatter_send_x)
        with ng.metadata(**parallel_metadata):
            x_plus_one_b = scatter_recv_b + 1
        gather_send_x_plus_one_b = GatherSendOp(from_node=x_plus_one_b)

    with ng.metadata(transformer='cpu0'):
        with ng.metadata(**parallel_metadata):
            gather_recv_x_plus_one_a = GatherRecvOp(from_node=from_node_a, to_node=to_node_a,
                                                    send_node=gather_send_x_plus_one_a)
            z_a = gather_recv_x_plus_one_a + 1

    update_comm_deps((scatter_send_x, gather_send_x_plus_one_a, z_a))
    update_comm_deps((gather_send_x_plus_one_b,))

    assert set([scatter_send_x]) == set(scatter_recv_a.control_deps)
    assert set([scatter_send_x, gather_send_x_plus_one_a]) == \
        set(gather_recv_x_plus_one_a.control_deps)
Example #26
def test_scatter_gather_node_axes(config):
    t = config
    axes = ng.make_axes([ng.make_axis(length) for length in t['axes']])
    parallel_axis = axes[t['parallel_axis']]
    with ng.metadata(device=None,
                     device_id='0',
                     transformer='cpu0',
                     host_transformer=None):
        from_node = ng.placeholder(axes=axes)
        to_node = ng.placeholder(axes=axes)

    with ng.metadata(device=None,
                     device_id=t['device_id'],
                     transformer=None,
                     parallel=parallel_axis,
                     host_transformer=None):
        par_node = ng.placeholder(axes=axes)

    scatter_send_op = ScatterSendOp(from_node=from_node, to_node=par_node)
    assert axes == scatter_send_op.axes
    assert t['slices'] == scatter_send_op.slices

    scatter_recv_op = ScatterRecvOp(to_node=par_node,
                                    send_node=scatter_send_op)

    for sct_a, a in zip(scatter_recv_op.axes, axes):
        assert sct_a.length == a.length

    gather_send_op = GatherSendOp(from_node=scatter_recv_op)
    assert_axes_eq_len(scatter_recv_op.axes, gather_send_op.axes)

    gather_recv_op = GatherRecvOp(from_node=par_node,
                                  to_node=to_node,
                                  send_node=gather_send_op)
    assert_axes_eq_len(axes, gather_recv_op.axes)

    assert t['slices'] == gather_recv_op.slices
Example #27
def test_weight_clipping(w_clip, optimizer):
    opt_ng = optimizer(0.1, weight_clip_value=w_clip)
    if isinstance(opt_ng, Adam):
        pytest.config.argon_skip_now("Argon Transformer error")  # TODO triage

    # Set up data placeholders
    C = ng.make_axis(20)
    N = ng.make_axis(32, name='N')

    data = ng.placeholder([C, N])
    target = ng.placeholder([N])

    # params to be updated using optimizer to be tested
    # make sure initial values are higher than clip values
    np_W = 10 * w_clip * (2 * np.random.rand(C.length) - 1)
    W = ng.variable([C], initial_value=np_W)

    # double check generated initial W value
    assert np.max(np_W) > w_clip
    assert np.min(np_W) < -w_clip

    # Set up op graph
    cost = ng.sum(target - ng.dot(W, data), out_axes=())

    updated_weights = ng.sequential([opt_ng(cost), W])

    epsilon = w_clip * 1e-3
    # Set up the computation and run the "train" loop
    with ExecutorFactory() as ex:
        opt_ng_comp = ex.transformer.computation(updated_weights, data, target)
        mock_dataset = data_generator(20, C.length, N.length)

        for x, y in mock_dataset:
            ng_W = opt_ng_comp(x, y)  # updated weights for ngraph optimizer

            assert np.max(ng_W) < w_clip + epsilon
            assert np.min(ng_W) > -w_clip - epsilon
Example #28
def test_prod_deriv(prod_deriv_arrays):  # Argon Transformer error - TODO triage
    """
    Test reduce product's gradient
    """
    def power_set(lst):
        """
        power_set([0, 1, 2]) is:
        [[], [0], [1], [0, 1], [2], [0, 2], [1, 2], [0, 1, 2]]
        """
        result = [[]]
        for x in lst:
            result.extend([subset + [x] for subset in result])
        return result

    def get_all_reduction_axes(axes):
        """
        Get all possible reduction axes
        """
        ndim = len(axes.lengths)
        if ndim == 0:
            return axes
        else:
            results = []
            all_indices = power_set(range(ndim))
            for indices in all_indices:
                if not indices:
                    results.append(ng.make_axes([]))
                else:
                    results.append(
                        ng.make_axes([axes[index] for index in indices]))
            return results

    def shape_to_axes(shape):
        """
        Convert shape to axes
        """
        if not shape:
            return ng.make_axes()
        axes = ng.make_axes([ng.make_axis(length=s) for s in shape])
        return axes

    x_val = prod_deriv_arrays
    axes = shape_to_axes(x_val.shape)
    all_reduction_axes = get_all_reduction_axes(axes)
    for reduction_axes in all_reduction_axes:
        x = ng.placeholder(axes=axes)
        x_prod = ng.prod(x, reduction_axes)
        check_derivative(x_prod, x, 0.001, x_val, atol=1e-3, rtol=1e-3)
Example #29
def test_flatten_deriv_simplified():
    """
    Test derivative with dot and flatten
    """
    ax_N = ng.make_axis(length=3)
    ax_Y = ng.make_axis(length=2)

    x = ng.placeholder(ng.make_axes([ax_N]))
    w = ng.constant([5, 2], axes=ng.make_axes([ax_Y]))
    logits = ng.dot(x, w)
    cost = ng.sum(logits, reduction_axes=logits.axes)

    delta = 0.001
    u = rng.uniform(.1, 5.0, x.axes)
    check_derivative(cost, x, delta, u, atol=1e-2, rtol=1e-2)
Example #30
def test_deconv(transformer_factory, deconv_n4_hw4_c1_5x5):
    cf = ConvParams(**deconv_n4_hw4_c1_5x5)

    # randomly initialize
    input_value = rng.uniform(-0.5, 0.5, cf.ax_i)
    filter_value = rng.uniform(-0.5, 0.5, cf.ax_f)
    error_value = rng.uniform(-0.5, 0.5, cf.ax_o)

    inputs = ng.placeholder(cf.ax_i)
    filters = ng.placeholder(cf.ax_f)
    errors = ng.placeholder(cf.ax_o)

    output = ng.deconvolution(cf.conv_params, inputs, filters, axes=cf.ax_o)
    bprop_out = ng.deriv(output, inputs, errors)
    update_out = ng.deriv(output, filters, errors)

    with executor([output, bprop_out, update_out], inputs, filters,
                  errors) as conv_executor:
        result_ng, gradI_ng, gradF_ng = conv_executor(input_value,
                                                      filter_value,
                                                      error_value)

    # Compute reference with NumPy
    result_np = reference_deconv_fprop(cf.conv_params, input_value,
                                       filter_value)
    gradI_np, gradF_np = reference_deconv_bprop(cf.conv_params, error_value,
                                                input_value, filter_value)

    # Compare fprop
    assert np.allclose(result_ng, result_np, rtol=0.1, atol=0)

    # Compare bprop
    assert np.allclose(gradI_ng, gradI_np, rtol=0.1, atol=0)

    # Compare update
    assert np.allclose(gradF_ng, gradF_np, rtol=0.1, atol=0)
Example #31
def test_cross_entropy_multi_axis_order(transformer_factory, input_tensor):
    """If y and t have different axis orders, it should give the same result"""
    y = input_tensor
    t1 = ng.placeholder(y.axes)

    # Reorder axes
    feature_axis, batch_axis = y.axes
    t2 = ng.placeholder(ng.make_axes([batch_axis, feature_axis]))

    # Set up numpy variables
    np_y = np.random.uniform(0, 1, y.axes.lengths)
    if feature_axis.length > batch_axis.length:
        np_t1 = np.eye(feature_axis.length)[:, :batch_axis.length]
    else:
        np_t1 = np.eye(batch_axis.length)[:feature_axis.length, :]
    np_t2 = np_t1.T

    with ExecutorFactory() as ex:
        f1 = ex.executor(ng.cross_entropy_multi(ng.softmax(y), t1), y, t1)
        f2 = ex.executor(ng.cross_entropy_multi(ng.softmax(y), t2), y, t2)

        out1 = f1(np_y, np_t1)
        out2 = f2(np_y, np_t2)
        ng.testing.assert_allclose(out1.ravel(), out2.ravel(), rtol=1e-5)
Example #32
def test_layer_caching():

    in_obj = ng.placeholder(())
    layer = SimpleLayer()
    out_train = layer(in_obj)
    out_train2 = layer(in_obj)
    with Layer.inference_mode_on():
        out_inference = layer(in_obj)
        out_inference2 = layer(in_obj)
    out_train3 = layer(in_obj)

    assert out_train is out_train2, "Training mode call not cached"
    assert out_inference is out_inference2, "Inference mode call not cached"
    assert out_train is not out_inference, "Training and inference mode calls are the same"
    assert out_train is out_train3, "Training mode not restored"
Example #33
def make_placeholder(input_size, sequence_length, batch_size, extra_axes=0):

    input_axis = ng.make_axis(name='features')
    recurrent_axis = ng.make_axis(name='REC_REP')
    batch_axis = ng.make_axis(name='N')

    input_axes = ng.make_axes([input_axis, recurrent_axis, batch_axis])
    input_axes.set_shape((input_size, sequence_length, batch_size))
    input_axes = ng.make_axes([ng.make_axis(length=1, name='features_' + str(i))
                               for i in range(extra_axes)]) + input_axes

    input_placeholder = ng.placeholder(input_axes)
    rng = RandomTensorGenerator()
    input_value = rng.uniform(-0.01, 0.01, input_axes)

    return input_placeholder, input_value