Example #1
def test_binary_op(ng_func, np_func):
    H = ng.make_axis().named('H')
    W = ng.make_axis().named('W')

    tests = [
        {
            'tensor1': [[1, 2, 3, 4], [5, 6, 7, 8]],
            'tensor1_axes': (H, W),
            'tensor2': [[10, 2, 3, 40], [15, 6, 9, 8]],
            'tensor2_axes': (H, W),
            'axes_lengths': {H: 2, W: 4}
        }]

    for test in tests:
        # set up tensors
        for axis, length in test['axes_lengths'].items():
            axis.length = length

        tensor1 = ng.placeholder(test['tensor1_axes'])
        value1 = np.array(test['tensor1'], dtype=np.float32)

        tensor2 = ng.placeholder(test['tensor2_axes'])
        value2 = np.array(
            test['tensor2'], dtype=np.float32
        )

        _ng_func = ng_func(tensor1, tensor2)

        with ExecutorFactory() as ex:

            _ng_computation = ex.executor(_ng_func, tensor1, tensor2)
            _ng_val = _ng_computation(value1, value2)
            _ng_ref = np_func(value1, value2)
            np.testing.assert_equal(_ng_val, _ng_ref)
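
test_binary_op receives an ngraph op and its NumPy counterpart as arguments, so it is presumably driven by a pytest parametrize decorator. A minimal sketch of such a registration; the op pairs listed here are placeholders, not the original list:

import numpy as np
import pytest

import ngraph as ng

# Hypothetical op pairs: any elementwise ngraph/NumPy pair with matching
# semantics fits this test.
@pytest.mark.parametrize("ng_func, np_func", [
    (ng.add, np.add),
    (ng.multiply, np.multiply),
    (ng.maximum, np.maximum),
])
def test_binary_op(ng_func, np_func):
    ...  # body as in the example above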
Example #2
def test_flat_tensor_dot_tensor():
    """
    Ensure that a flattened argument axis is not unflattend in the result.

    """
    H = ng.make_axis(2)
    W = ng.make_axis(7)
    C = ng.make_axis(3)
    K = ng.make_axis(11)

    axes_a = ng.make_axes([H, W, C])
    a = ng.constant(np.ones(axes_a.lengths), axes=axes_a)
    flat_a = ng.flatten_at(a, 2)

    axes_b = ng.make_axes([C, K])
    b = ng.constant(np.ones(axes_b.lengths), axes=axes_b)

    result = ng.dot(b, flat_a)

    with ExecutorFactory() as factory:
        result_fun = factory.executor(result)
        result_val = result_fun()

    result_correct = np.ones_like(result_val) * C.length
    ng.testing.assert_allclose(result_val, result_correct)
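
For reference, a NumPy sketch of the same contraction, assuming ng.flatten_at(a, 2) groups the first two axes of a into a single flattened axis:

import numpy as np

H, W, C, K = 2, 7, 3, 11
a = np.ones((H, W, C))
b = np.ones((C, K))

flat_a = a.reshape(H * W, C)                       # (H, W, C) -> (H*W, C)
result = np.tensordot(b, flat_a, axes=([0], [1]))  # contract over C
assert result.shape == (K, H * W)                  # flattened axis stays flat
assert np.allclose(result, C)                      # all-ones dot gives C = 3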
Example #3
def test_variable():
    input_axes = ng.make_axes([ng.make_axis(10), ng.make_axis(3)])
    var = ng.variable(axes=input_axes)
    assign_val = np.random.rand(10, 3)
    var_assign = ng.AssignOp(tensor=var, val=assign_val)
    var_seq = ng.sequential([var_assign, var])
    var_comp = ng.computation(var_seq, "all")
    results = dict()
    weight_saver = Saver()
    with closing(ngt.make_transformer()) as transformer:
        var_func = transformer.add_computation(var_comp)
        weight_saver.setup_save(transformer=transformer, computation=var_comp)
        results['saved'] = var_func().copy()
        weight_saver.save(filename="test_variable")

    reassign_val = np.random.rand(10, 3)
    var_reassign = ng.AssignOp(tensor=var, val=reassign_val)

    var_recomp = ng.computation(var_reassign, "all")
    var_read = ng.computation(var, "all")
    with closing(ngt.make_transformer()) as restore_transformer:
        var_recompfunc = restore_transformer.add_computation(var_recomp)
        weight_saver.setup_restore(transformer=restore_transformer,
                                   computation=var_recomp,
                                   filename="test_variable")
        var_readfunc = restore_transformer.add_computation(var_read)
        var_recompfunc()
        results['reassigned'] = var_readfunc().copy()
        weight_saver.restore()
        results['restored'] = var_readfunc().copy()
    os.remove("test_variable.npz")
    assert np.allclose(results['saved'], assign_val, atol=0)
    assert np.allclose(results['reassigned'], reassign_val, atol=0)
    assert np.allclose(results['saved'], results['restored'], atol=0)
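
To see what Saver actually wrote, the archive can be inspected directly before the os.remove call at the end of the test. A sketch assuming a standard .npz file with one array per variable (the format is an assumption; only the "test_variable.npz" filename is taken from the test above):

import numpy as np

with np.load("test_variable.npz") as archive:
    for name in archive.files:
        print(name, archive[name].shape)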
Example #4
def test_unary_op_(ng_func, np_func):
    H = ng.make_axis().named('H')
    W = ng.make_axis().named('W')

    tests = [
        {
            'tensor1': [[1, 2, 3, 4], [5, 6, 7, 8]],
            'tensor1_axes': (H, W),
            'axes_lengths': {H: 2, W: 4}
        }]

    for test in tests:
        # set up tensors
        for axis, length in test['axes_lengths'].items():
            axis.length = length

        tensor1 = ng.placeholder(test['tensor1_axes'])
        value1 = np.array(test['tensor1'], dtype=np.float32)

        _ng_func = ng_func(tensor1)

        with ExecutorFactory() as ex:
            _ng_computation = ex.executor(_ng_func, tensor1)
            _ng_val = _ng_computation(value1)
            _ng_ref = np_func(value1)
            assert np.allclose(_ng_val, _ng_ref, rtol=0, atol=2)
Example #5
def input_axes(request):
    return ng.make_axes([
        ng.make_axis(length=request.param[0]),
        ng.make_axis(length=request.param[1]),
        ng.make_axis(length=request.param[2]),
        ng.make_axis(length=request.param[3])
    ])
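
input_axes reads request.param, so it is presumably a parametrized pytest fixture. A hypothetical registration; the 4-tuples of axis lengths are placeholders, not the original values:

import pytest

# Hypothetical parameter sets: each tuple gives the four axis lengths.
@pytest.fixture(params=[(1, 2, 3, 4), (2, 4, 8, 16)])
def input_axes(request):
    ...  # body as in the example above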
Example #6
def test_exit_condition(transformer_factory):
    bsz = 16
    class_num = 10

    # Limit the maximum absolute value of tensor elements to 7.9.
    #
    # np.random.randn fills the tensors with random values. It can return any
    # value, but values above 5 are highly improbable and appear very rarely,
    # so a limit of 7.9 almost never modifies the tested tensor while
    # preventing occasional random failures when the test runs in a
    # continuous-integration environment. 7.9 is the approximate upper bound
    # of the range [4, 8); numbers in this range can be expressed by flexpoint
    # numbers with the same dec.
    # Why not 15.9, the approximate limit of the range [8, 16)? Numbers above
    # 8 are highly improbable, and when they do appear they can cause random
    # failures due to the reduced accuracy of all numbers in the tensor; most
    # numbers in a normal distribution are close to 0.

    is_flex = is_flex_factory(transformer_factory)
    clip_val = 7.9 if is_flex else 0

    N, Y = ng.make_axis(bsz), ng.make_axis(class_num)
    y_val = rng.randn_abs_clip(ng.make_axes([N, Y]), clip_max=clip_val)
    y = ng.constant(y_val, ng.make_axes([N, Y]))

    likelihood = ng.log(ng.softmax(y, normalization_axes=y.axes[1]))

    with ExecutorFactory() as ex:
        comp = ex.executor(likelihood)

        val1 = comp()
        val2 = comp()
        ng.testing.assert_allclose(val1, val2, atol=0, rtol=0)
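
rng.randn_abs_clip is not defined in this excerpt. Based on the comment above, a hypothetical NumPy equivalent looks like this (treating clip_max == 0 as "no clipping" is an assumption drawn from the non-flex branch):

import numpy as np

def randn_abs_clip(shape, clip_max=0):
    # Standard-normal samples whose absolute values are clipped at clip_max.
    x = np.random.randn(*shape)
    if clip_max > 0:
        x = np.clip(x, -clip_max, clip_max)
    return x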
Example #7
def test_stack():
    W = ng.make_axis(length=4)
    H = ng.make_axis(length=5)
    I = ng.make_axis(length=3)

    axes = ng.make_axes([W, H])

    rng = RandomTensorGenerator(0, np.float32)

    a_v = [rng.uniform(0, 1, axes) for i in range(I.length)]

    for pos in range(len(axes) + 1):
        a = [ng.placeholder(axes, initial_value=p) for p in a_v]

        s = ng.stack(a, I, pos)

        with ExecutorFactory() as ex:
            num_funs = [
                ex.numeric_derivative(s, p, delta,
                                      *(q for q in a if q is not p))
                for p in a
            ]
            sym_funs = [
                ex.derivative(s, p, *(q for q in a if q is not p))
                for p in a
            ]

            for n_fun, s_fun, a_i in zip(num_funs, sym_funs, a_v):
                na_is = list(na_i for na_i in a_v if na_i is not a_i)
                d_n = n_fun(a_i, *na_is)
                d_s = s_fun(a_i, *na_is)
                ng.testing.assert_allclose(d_n, d_s, rtol=rtol, atol=atol)
Example #8
def test_kernel_cache(transformer_factory):
    X = ng.make_axis(32)
    Y = ng.make_axis(32)
    C = ng.make_axis(16384)
    axes = ng.make_axes([
        X,
        Y
    ])
    bcast_axes = ng.make_axes([
        X,
        Y,
        C
    ])

    # Limiting maximum absolute value for tensors elements to 7.9.
    # See description in function test_exit_condition above

    is_flex = is_flex_factory(transformer_factory)
    clip_val = 7.9 if is_flex else 0

    x_val = rng.randn_abs_clip(axes, clip_max=clip_val)
    y_val = rng.randn_abs_clip(bcast_axes, clip_max=clip_val)
    z_val = rng.randn_abs_clip(bcast_axes, clip_max=clip_val)

    x = ng.constant(x_val, axes)
    y = ng.constant(y_val, bcast_axes)
    z = ng.constant(z_val, bcast_axes)

    out = ng.add(ng.add(x, y), z)

    with executor(out) as ex:
        graph_val = ex()
    np_val = np.add(np.add(x_val.reshape(32, 32, 1), y_val), z_val)
    ng.testing.assert_allclose(graph_val, np_val, rtol=1e-4, atol_multiplier=2)
Example #9
def make_weights(input_placeholder,
                 hidden_size,
                 weight_initializer,
                 bias_initializer,
                 init_state=False):
    in_feature_axes = tuple(
        input_placeholder.axes)[:-2]  # input axis + any extra axes of length 1
    out_feature_axes = ng.make_axes([ng.make_axis(hidden_size)])
    batch_axis = input_placeholder.axes.batch_axis()
    hidden_axis = ng.make_axis(hidden_size)

    w_in_axes = ng.make_axes(hidden_axis) + in_feature_axes
    w_rec_axes = ng.make_axes(hidden_axis) + out_feature_axes

    W_in = weight_initializer(w_in_axes)
    W_rec = weight_initializer(w_rec_axes)
    b = bias_initializer(hidden_axis)

    if init_state is True:
        ax_s = ng.make_axes([hidden_axis, batch_axis])
        init_state = ng.placeholder(ax_s)
        init_state_value = rng.uniform(-1, 1, ax_s)
    else:
        init_state = None
        init_state_value = None

    return W_in, W_rec, b, init_state, init_state_value
Example #10
def test_weight_clipping(w_clip, optimizer):
    opt_ng = optimizer(0.1, weight_clip_value=w_clip)

    # Set up data placeholders
    C = ng.make_axis(20)
    N = ng.make_axis(32, name='N')

    data = ng.placeholder([C, N])
    target = ng.placeholder([N])

    # params to be updated using optimizer to be tested
    # make sure initial values are higher than clip values
    np_W = 10 * w_clip * (2 * np.random.rand(C.length) - 1)
    W = ng.variable([C], initial_value=np_W)

    # double check generated initial W value
    assert np.max(np_W) > w_clip
    assert np.min(np_W) < -w_clip

    # Set up op graph
    cost = ng.sum(target - ng.dot(W, data), out_axis=())

    updated_weights = ng.sequential([opt_ng(cost), W])

    epsilon = w_clip * 1e-3
    # Set up the computation and run the "train" loop
    with ExecutorFactory() as ex:
        opt_ng_comp = ex.transformer.computation(updated_weights, data, target)
        mock_dataset = data_generator(20, C.length, N.length)

        for x, y in mock_dataset:
            ng_W = opt_ng_comp(x, y)  # updated weights for ngraph optimizer

            assert np.max(ng_W) < w_clip + epsilon
            assert np.min(ng_W) > -w_clip - epsilon
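
data_generator is also not defined in this excerpt. A hypothetical stand-in, shaped to match the (C, N) data and (N,) target placeholders used by this test and the next one:

import numpy as np

def data_generator(iterations, feature_len, batch_size):
    # Yields `iterations` random (data, target) batches.
    for _ in range(iterations):
        x = np.random.rand(feature_len, batch_size).astype(np.float32)
        y = np.random.rand(batch_size).astype(np.float32)
        yield x, y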
Example #11
def compare_optimizer_variable_select(opt_ng, opt_ref):

    # Set up data placeholders
    C = ng.make_axis(20)
    N = ng.make_axis(32, name='N')

    data = ng.placeholder([C, N])
    target = ng.placeholder([N])

    # params to be updated using optimizer to be tested
    np_W1 = np.random.rand(C.length)
    np_W2 = np.random.rand(C.length)
    W1 = ng.variable([C], initial_value=np_W1)
    W2 = ng.variable([C], initial_value=np_W2)

    # Set up op graph
    cost = ng.sum(target - ng.dot(W1, data) - ng.dot(W2, data), out_axis=())
    updated_weights = ng.sequential([opt_ng(cost, variables=[W1]), W1])

    # Set up the computation and run the "train" loop
    with ExecutorFactory() as ex:
        opt_ng_comp = ex.transformer.computation([updated_weights, W2], data, target)
        mock_dataset = data_generator(20, C.length, N.length)

        for x, y in mock_dataset:
            [ng_W1, ng_W2] = opt_ng_comp(x, y)  # updated weights for ngraph optimizer
            np_W1 = opt_ref(x, np_W1)   # updated weights for reference optimizer

            ng.testing.assert_allclose(np_W1, ng_W1, rtol=1e-3)
            ng.testing.assert_allclose(np_W2, ng_W2, rtol=1e-3)
Example #12
def test_dropout_train(nin, batch_size, keep):

    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin, name='F')

    inp = ng.placeholder([F, N])
    layer = Dropout(keep=keep)
    fprop = layer(inp)

    # create data
    x = np.random.uniform(size=(nin, batch_size))

    # evaluate
    with ExecutorFactory() as ex:
        comp = ex.executor([fprop, layer.mask], inp)
        out, mask = comp(x)
        numpy_out = x * mask[:, None]
        ng.testing.assert_allclose(out, numpy_out, atol=atol, rtol=rtol)

        if keep < 1.0:
            out1, mask1 = out.copy(), mask.copy()
            out2, mask2 = comp(x)
            assert (out1 != out2).any()
            assert (mask1 != mask2).any()
Example #13
def test_duplicate_axis_different_length():
    a = ng.make_axis(1, name='N')
    b = ng.make_axis(2, name='N')
    with pytest.raises(ValueError) as e:
        ng.make_axes([a, b])

    # ensure the name of the axis appears in the exception
    assert 'N' in str(e)
Example #14
def CDHWN():
    return ng.make_axes([
        ng.make_axis(3, name='C'),
        ng.make_axis(4, name='D'),
        ng.make_axis(5, name='H'),
        ng.make_axis(6, name='W'),
        ng.make_axis(8, name='N')
    ])
Example #15
    def baseline_value(self, x):
        '''
        Use the ngraph-constructed computation to evaluate the
        activation on inputs x.
        '''
        X = ng.placeholder([ng.make_axis(), ng.make_axis(name='N')])
        X.axes.set_shape(x.shape)
        with ExecutorFactory() as ex:
            activation_function = ex.executor(self.neon_activation(X), X)
            return activation_function(x)
Example #16
def test_linear_keep_batch_axis():
    feature_axis = ng.make_axis(1, name='A')
    batch_axis = ng.make_axis(2, name='N')

    x = ng.placeholder([batch_axis])
    linear = Linear(axes=feature_axis,
                    keep_axes=[batch_axis],
                    init=UniformInit(1.0, 1.0))(x)

    assert linear.axes == ng.make_axes([feature_axis, batch_axis])
Example #17
def test_change_recurrent_axis_length(recurrent_layer_cls, batch_size,
                                      sequence_length, input_size,
                                      hidden_size):
    """
    Recurrent layer support for changing REC axis length
    (needed by seq2seq inference)
    """
    # create three identical recurrent layers with same weights
    W_input_val = np.random.normal(size=(hidden_size, input_size))
    W_recur_val = np.random.normal(size=(hidden_size, hidden_size))
    rec1 = recurrent_layer_cls(nout=hidden_size,
                               init=ConstantInit(W_input_val),
                               init_inner=ConstantInit(W_recur_val),
                               activation=Tanh())
    rec2 = recurrent_layer_cls(nout=hidden_size,
                               init=ConstantInit(W_input_val),
                               init_inner=ConstantInit(W_recur_val),
                               activation=Tanh())
    rec3 = recurrent_layer_cls(nout=hidden_size,
                               init=ConstantInit(W_input_val),
                               init_inner=ConstantInit(W_recur_val),
                               activation=Tanh())

    # create input placeholders and values
    # sequence length greater than 1
    N = ng.make_axis(length=batch_size, name='N')
    REC = ng.make_axis(length=sequence_length, name='REC')
    M = ng.make_axis(length=input_size, name='M')
    xn_axes = ng.make_axes([M, REC, N])
    xn = ng.placeholder(axes=xn_axes)
    xn_val = np.random.normal(size=(input_size, sequence_length, batch_size))
    # sequence length 1
    REC1 = ng.make_axis(length=1, name='REC')
    x1_axes = ng.make_axes([M, REC1, N])
    x1 = ng.placeholder(axes=x1_axes)
    x1_val = np.random.normal(size=(input_size, 1, batch_size))

    # check results of switching REC axis of a layer's input
    # computations switching REC axis
    y1_n = rec1(xn)
    y1_1 = rec1(x1)

    # check against not switching
    y2_n = rec2(xn)
    y3_1 = rec3(x1)

    with ExecutorFactory() as ex:

        y1_n_comp = ex.executor(y1_n, xn)
        y1_1_comp = ex.executor(y1_1, x1)
        y2_n_comp = ex.executor(y2_n, xn)
        y3_1_comp = ex.executor(y3_1, x1)

        ng.testing.assert_allclose(y1_n_comp(xn_val), y2_n_comp(xn_val))
        ng.testing.assert_allclose(y1_1_comp(x1_val), y3_1_comp(x1_val))
Example #18
def test_scalar_broadcast():
    """
    Test broadcasting a scalar into a tensor
    """
    with ExecutorFactory() as ex:
        x_axes = ng.make_axes()
        broadcast_axes = ng.make_axes([ng.make_axis(2), ng.make_axis(3)])
        x = ng.constant(1., axes=x_axes)
        z = ng.broadcast(x, axes=broadcast_axes)
        z_comp = ex.executor(z)
        assert np.array_equal(z_comp(), np.ones(broadcast_axes.lengths))
Example #19
def test_linear_axes_nout():
    feature_axis = ng.make_axis(1, name='A')
    batch_axis = ng.make_axis(2, name='N')

    x = ng.placeholder([feature_axis, batch_axis])
    linear = Linear(nout=3, init=UniformInit(1.0, 1.0))(x)

    assert feature_axis not in linear.axes
    assert batch_axis in linear.axes
    assert linear.axes.batch_axis().length == 2
    assert linear.axes.sample_axes().lengths == (3, )
Example #20
def test_linear_W_axes_nout():
    feature_axis = ng.make_axis(1, name='A')
    batch_axis = ng.make_axis(2, name='N')

    x = ng.placeholder([feature_axis, batch_axis])
    linear = Linear(nout=3, init=UniformInit(1.0, 1.0))
    linear(x)

    assert linear.W.axes.batch_axis() is None
    assert feature_axis in linear.W.axes
    assert len(linear.W.axes - feature_axis) == 1
    assert (linear.W.axes - feature_axis)[0].length == 3
Example #21
    def baseline_derivative(self, x):
        X = ng.placeholder([ng.make_axis(), ng.make_axis(name='N')])
        X.axes.set_shape(x.shape)
        with ExecutorFactory() as ex:
            activation_derivative = ex.derivative(self.neon_activation(X), X)

            # ex.derivative returns the full Jacobian of the activation; for
            # an elementwise function only the diagonal is nonzero, so stride
            # through the flattened array with step (x.size + 1) to extract it
            result = activation_derivative(x)
            result = result.ravel()[0:result.size:(x.size + 1)]
            result = result.reshape(x.shape)

            return result
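
The strided slice above walks exactly the diagonal of the flattened Jacobian; a small NumPy check of the trick:

import numpy as np

# For an n x n matrix, a stride of (n + 1) through .ravel() hits
# indices 0, n+1, 2(n+1), ... -- the diagonal entries.
J = np.arange(16.0).reshape(4, 4)
assert np.array_equal(J.ravel()[0:J.size:(4 + 1)], np.diag(J))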
Example #22
def test_tensor_slice():
    """
    slicing a tensor should work like numpy
    """
    input_axes = ng.make_axes(
        [ng.make_axis(10), ng.make_axis(20),
         ng.make_axis(5)])

    x = ng.placeholder(axes=input_axes)

    assert x[:5].axes.full_lengths == (5, 20, 5)
    assert x[:, 2:7].axes.full_lengths == (10, 5, 5)
    assert x[:5, :, :-1].axes.full_lengths == (5, 20, 4)
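
The docstring promises NumPy semantics; the same slices on a NumPy array of matching shape give exactly the asserted lengths:

import numpy as np

x_np = np.zeros((10, 20, 5))
assert x_np[:5].shape == (5, 20, 5)
assert x_np[:, 2:7].shape == (10, 5, 5)
assert x_np[:5, :, :-1].shape == (5, 20, 4)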
Example #23
def test_pad_mixed():
    """
    mix 0 padding with non-0 padding
    """
    input_axes = ng.make_axes([ng.make_axis(1), ng.make_axis(1)])
    x = ng.variable(input_axes)

    pad = ng.pad(x, [0, 1])

    assert pad.axes[0].name == x.axes[0].name
    assert pad.axes[1].name == x.axes[1].name
    assert pad.axes[0].length == x.axes[0].length
    assert pad.axes[1].length != x.axes[1].length
Example #24
def test_rng_repetition():
    """
    Tests rng ops, to make sure they run every execution and not just initialization
    """
    axes = ng.make_axes([ng.make_axis(2), ng.make_axis(2)])
    y = ng.uniform(axes)
    mysum = ng.sum(y)
    trans = ng.transformers.make_transformer()
    rand_comp = trans.computation(mysum)
    val1 = rand_comp().copy()
    val2 = rand_comp().copy()
    assert val1 != val2
    trans.close()
Example #25
    def make_placeholders(self):
        batch_axis = ng.make_axis(length=self.batch_size, name="N")
        time_axis = ng.make_axis(length=self.time_steps, name="REC")
        feature_axis = ng.make_axis(length=self.nfeatures, name="feature_axis")

        placeholders = {}
        for k in self.data_arrays.keys():
            if k in ('inp_txt', 'teacher_tgt'):
                p_axes = ng.make_axes([batch_axis, time_axis, feature_axis])
            else:
                p_axes = ng.make_axes([batch_axis, time_axis])
            placeholders[k] = ng.placeholder(p_axes)

        return placeholders
Example #26
def test_broadcast():
    M = ng.make_axis(length=1)
    N = ng.make_axis(length=4)

    np_a = np.array([[1, 2, 3, 4]], dtype=np.float32)
    np_c = np.add(np_a, 2)

    a = ng.constant(np_a, [M, N])
    c = ng.add(a, 2)

    with executor(c) as _add:
        result = _add()

        assert np.allclose(result, np_c)
Example #27
def concatenate_variables(request):
    num_vars, num_axes, concat_pos = request.param
    common_axes = [ng.make_axis(length=2) for _ in range(num_axes - 1)]
    x_list = list()
    np_list = list()
    ax = ng.make_axis(length=np.random.randint(3, 10))
    axes = ng.make_axes(common_axes[:concat_pos] + [ax] +
                        common_axes[concat_pos:])
    for _ in range(num_vars):
        var = np.random.uniform(0, 1, axes.full_lengths)
        np_list.append(var)
        x_list.append(ng.constant(var, axes=axes))

    return x_list, np_list, concat_pos
Example #28
def test_slice_nop():
    """
    slicing an axis shouldn't change the name
    """
    input_axes = ng.make_axes([ng.make_axis(1), ng.make_axis(1)])
    x = ng.variable(input_axes)

    s = ng.tensor_slice(x, [
        slice(None, None, None),
        slice(None, None, 1),
    ])

    assert s.axes[0] == x.axes[0]
    assert s.axes[1] == x.axes[1]
Example #29
def test_cputensor_add_constant():
    """TODO."""
    M = ng.make_axis(length=1)
    N = ng.make_axis(length=3)

    np_a = np.array([[1, 2, 3]], dtype=np.float32)
    np_c = np.add(np_a, 2)

    a = ng.constant(np_a, [M, N])
    b = ng.constant(2)
    c = ng.add(a, b)
    with executor(c) as ex:
        result = ex()
    assert np.array_equal(result, np_c)
Example #30
def test_concatenate():
    with ExecutorFactory() as ex:
        A = ng.make_axis(name='A', length=3)
        B = ng.make_axis(name='B', length=4)
        np_shape = (A.length, B.length)
        x0_np = -np.ones(np_shape)
        x1_np = np.ones(np_shape)
        x0_ng = ng.persistent_tensor([A, B], initial_value=x0_np).named('x0')
        x1_ng = ng.persistent_tensor([A, B], initial_value=x1_np).named('x1')
        j_np = np.concatenate([x0_np, x1_np], axis=0)
        j_ng = ng.concat_along_axis([x0_ng, x1_ng], A)
        f = ex.executor(j_ng)
        j_val = f()
        ng.testing.assert_allclose(j_val, j_np)