Example #1
def test_cross_entropy_binary(transformer_factory):
    """TODO."""
    N = ng.make_axis(name='N')
    W = ng.make_axis(name='W')

    delta = .001
    W.length = 20
    N.length = 128
    axes = ng.make_axes([W, N])
    p_u = ng.placeholder(axes)
    u = rng.uniform(-3.0, 3.0, p_u.axes)
    p_v = ng.placeholder(axes)
    v = rng.uniform(-3.0, 3.0, p_u.axes)

    y = ng.sigmoid(p_u)
    t = ng.softmax(p_v)
    val_u = ng.cross_entropy_binary_inner(y, t)

    ex = ExecutorFactory()
    dval_u_num_fun = ex.numeric_derivative(val_u, p_u, delta, p_v)
    dval_u_graph_fun = ex.derivative(val_u, p_u, p_v)

    dval_u_num = dval_u_num_fun(u, v)
    dval_u_graph = dval_u_graph_fun(u, v)
    np.testing.assert_allclose(dval_u_graph, dval_u_num, atol=1e-2, rtol=1e-2)
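
For reference, the derivative this test checks has a well-known closed form: with y = sigmoid(u), the gradient of the binary cross entropy with respect to the logits u is simply y - t. A minimal NumPy sketch (independent of ngraph, using only the standard formula) verifying this against central differences:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def ce_binary(u, t):
    # elementwise binary cross entropy of sigmoid(u) against target t
    y = sigmoid(u)
    return -(t * np.log(y) + (1 - t) * np.log(1 - y))

u = np.random.uniform(-3.0, 3.0, (20, 128))
t = np.random.uniform(0.1, 0.9, (20, 128))

delta = 1e-3
# ce_binary is elementwise in u, so a uniform shift gives the elementwise derivative
num_grad = (ce_binary(u + delta, t) - ce_binary(u - delta, t)) / (2 * delta)
np.testing.assert_allclose(num_grad, sigmoid(u) - t, atol=1e-4)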
Example #2
def test_sigmoid_value(transformer_factory):
    """ check the output of sigmoid is the same as np """
    axes = ng.make_axes([ng.make_axis(20), ng.make_axis(128)])
    p_x = ng.placeholder(axes)
    x = rng.uniform(-3.0, 3.0, p_x.axes)

    compare_f_at_x(ng.sigmoid(p_x), p_x, lambda x: 1.0 / (1 + np.exp(-x)), x, rtol=1e-5)
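
Note that the naive NumPy reference 1.0 / (1 + np.exp(-x)) overflows for large negative x (np.exp overflows float64 once its argument exceeds roughly 709). A numerically stable formulation, sketched here as a plain NumPy function (and presumably the motivation for the StableSigmoid importer in Example #9 below), branches on the sign of x:

import numpy as np

def stable_sigmoid(x):
    # For x >= 0, exp(-x) <= 1, so 1 / (1 + exp(-x)) cannot overflow.
    # For x < 0, rewrite as exp(x) / (1 + exp(x)) so the exponent stays <= 0.
    out = np.empty_like(x, dtype=np.float64)
    pos = x >= 0
    out[pos] = 1.0 / (1.0 + np.exp(-x[pos]))
    ex = np.exp(x[~pos])
    out[~pos] = ex / (1.0 + ex)
    return out

print(stable_sigmoid(np.array([-1000.0, -3.0, 0.0, 3.0, 1000.0])))
# -> [0.         0.04742587 0.5        0.95257413 1.        ], no overflow warnings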
Example #3
def test_sigmoid_deriv(input_tensor):
    """TODO."""
    p_u = input_tensor
    u = rng.uniform(-3.0, 3.0, p_u.axes)

    val_u = ng.sigmoid(p_u)

    check_derivative(val_u, p_u, 0.001, u, atol=1e-2, rtol=1e-2)
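
The identity under test is the standard sigmoid derivative, sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)). A self-contained NumPy check against central differences (a sketch that does not rely on the check_derivative helper):

import numpy as np

s = lambda x: 1.0 / (1.0 + np.exp(-x))
x = np.random.uniform(-3.0, 3.0, 1000)
delta = 1e-3
num = (s(x + delta) - s(x - delta)) / (2 * delta)
np.testing.assert_allclose(num, s(x) * (1 - s(x)), atol=1e-6)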
Example #4
def test_log_sigmoid_deriv(transformer_factory, input_tensor):
    """TODO."""
    p_u = input_tensor
    u = rng.uniform(-3.0, 3.0, p_u.axes)

    log_val_u = ng.log(ng.sigmoid(p_u))

    check_derivative(log_val_u, p_u, 0.001, u, atol=1e-2, rtol=1e-2)
Example #5
def test_log_sigmoid_deriv(transformer_factory):
    """TODO."""
    axes = ng.make_axes([ng.make_axis(20), ng.make_axis(128)])
    p_u = ng.placeholder(axes)
    u = rng.uniform(-3.0, 3.0, p_u.axes)

    log_val_u = ng.log(ng.sigmoid(p_u))

    check_derivative(log_val_u, p_u, 0.001, u, atol=1e-2, rtol=1e-2)
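
The derivative being checked has the closed form d/du log(sigmoid(u)) = 1 - sigmoid(u) = sigmoid(-u), which is why log-sigmoid stays well behaved under gradient checks even for strongly negative inputs. A quick standalone NumPy confirmation (a sketch, not part of the test suite):

import numpy as np

s = lambda x: 1.0 / (1.0 + np.exp(-x))
u = np.random.uniform(-3.0, 3.0, 1000)
delta = 1e-3
num = (np.log(s(u + delta)) - np.log(s(u - delta))) / (2 * delta)
np.testing.assert_allclose(num, s(-u), atol=1e-6)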
Example #6
def test_sigmoid_value(transformer_factory, input_tensor):
    """ check the output of sigmoid is the same as np """
    p_x = input_tensor
    x = rng.uniform(-3.0, 3.0, p_x.axes)

    compare_f_at_x(ng.sigmoid(p_x),
                   p_x,
                   lambda x: 1.0 / (1 + np.exp(-x)),
                   x,
                   rtol=1e-5)
Example #7
    def __call__(self, x):
        """
        Returns the sigmoidal activation.

        Arguments:
            x (Tensor or optree): Input value

        Returns:
            Tensor or optree: Output activation
        """
        return ng.sigmoid(x)
Example #8
    def Sigmoid(self, cntk_op, inputs):
        """
        Returns element-wise sigmoid of inputs[0].

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        return ng.sigmoid(inputs[0]).named(cntk_op.uid)
Example #9
    def StableSigmoid(self, cntk_op, inputs):
        """
        Returns element-wise sigmoid of inputs[0].

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        return ng.sigmoid(inputs[0]).named(cntk_op.uid)
Example #10
    def construct_sigmoid_pattern(self):
        """
        Generate a graph op that represents the pattern for the Sigmoid
        operation, ng.sigmoid(x).

        Returns:
            Single pattern that matches Sigmoid
        """

        self.sigmoid_x_label = "X"
        x = PatternLabelOp(self.sigmoid_x_label, axes={ng.make_axis(name='N')})
        sigmoid_op = ng.sigmoid(x)
        return sigmoid_op
Example #11
def test_cross_entropy_binary_logistic_shortcut(input_tensor):
    """TODO."""
    p_u = input_tensor
    p_v = ng.placeholder(p_u.axes)
    u = rng.uniform(-3.0, 3.0, p_u.axes)
    v = np_softmax(rng.uniform(-3.0, 3.0, p_u.axes), 0)

    cel = cross_entropy_binary_logistic(u, v)
    cel_shortcut = cross_entropy_binary_logistic_shortcut(u, v)
    ng.testing.assert_allclose(cel, cel_shortcut, rtol=1e-5)

    with executor(ng.cross_entropy_binary_inner(ng.sigmoid(p_u), p_v), p_u, p_v) as ex:
        cel_graph = ex(u, v)
    ng.testing.assert_allclose(cel, cel_graph, rtol=1e-5)
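
The "shortcut" these tests exercise is the standard algebraic simplification of binary cross entropy composed with a sigmoid: with y = sigmoid(u), the direct form -t*log(y) - (1-t)*log(1-y) reduces to (1-t)*u + log(1 + exp(-u)). The test helpers' definitions are not shown here; the sketch below reconstructs what they presumably compute, under that assumption:

import numpy as np

def cross_entropy_binary_logistic(u, t):
    # direct form: binary cross entropy of sigmoid(u) against target t
    y = 1.0 / (1.0 + np.exp(-u))
    return -(t * np.log(y) + (1 - t) * np.log(1 - y))

def cross_entropy_binary_logistic_shortcut(u, t):
    # simplified form: (1 - t) * u + softplus(-u)
    return (1 - t) * u + np.log1p(np.exp(-u))

u = np.random.uniform(-3.0, 3.0, (20, 128))
t = np.random.uniform(0.1, 0.9, (20, 128))
np.testing.assert_allclose(cross_entropy_binary_logistic(u, t),
                           cross_entropy_binary_logistic_shortcut(u, t),
                           rtol=1e-5)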
Example #12
    def __call__(self, batch_size, placeholders):

        embedding_ops = []

        for idx, lut in enumerate(self.luts):
            embedding_op = lut(placeholders['embeddings_placeholders'][idx])

            embedding_ops.append(embedding_op)

        X_deep = ng.concat_along_axis([placeholders['X_d']] + embedding_ops,
                                      ng.make_axis(name="F"))

        self.wide_deep = ng.sigmoid(
            self.deep_layers(X_deep) + self.linear_layer(placeholders['X_w']) +
            ng.variable((), initial_value=0.5).named('b'))

        return self.wide_deep
Example #13
def test_cross_entropy_binary_logistic_shortcut(transformer_factory):
    """TODO."""
    N = ng.make_axis(name='N')
    W = ng.make_axis(name='W')

    W.length = 20
    N.length = 128
    axes = ng.make_axes([W, N])
    p_u = ng.placeholder(axes)
    u = rng.uniform(-3.0, 3.0, p_u.axes)
    p_v = ng.placeholder(axes)
    v = np_softmax(rng.uniform(-3.0, 3.0, p_u.axes), 0)

    cel = cross_entropy_binary_logistic(u, v)
    cel_shortcut = cross_entropy_binary_logistic_shortcut(u, v)
    np.testing.assert_allclose(cel, cel_shortcut, rtol=1e-5)

    cel_graph = executor(ng.cross_entropy_binary_inner(ng.sigmoid(p_u), p_v), p_u, p_v)(u, v)
    np.testing.assert_allclose(cel, cel_graph, rtol=1e-5)
Example #14
def test_logreg(transformer_factory):
    # xs: (C, N), ys: (N,)
    xs = np.array([[0.52, 0.88, 0.52, 0.74], [1.12, -1.08, 0.06, -2.49],
                   [0.77, 0.15, -1.3, 1.39]])
    ys = np.array([1, 1, 0, 1])
    max_iter = 10
    alpha = 0.1
    thetas = np.array([0., 0., 0.])

    np_logreg = NumpyLogreg(xs, ys, thetas)

    C, N = ng.make_axis(length=3), ng.make_axis(length=4)

    # input tensors
    xs_v = ng.placeholder((C, N))
    ys_v = ng.placeholder([N])
    alpha_v = ng.placeholder(())
    thetas_var = ng.variable([C - 1], initial_value=thetas)

    # define ops
    ys_pred = ng.sigmoid(ng.dot(thetas_var, xs_v))
    log_likelihoods = ng.log(ys_pred) * ys_v + ng.log(1 - ys_pred) * (1 - ys_v)
    loss = -ng.sum(log_likelihoods, reduction_axes=[N])
    grad_comp = ng.deriv(loss, thetas_var)
    grad = ng.sequential([
        ng.assign(thetas_var, thetas_var - alpha_v * grad_comp), thetas_var,
        grad_comp
    ])

    # transformer
    transformer = ngt.make_transformer()
    train_eval_func = transformer.computation([grad, loss, thetas_var], xs_v,
                                              ys_v, alpha_v)

    # evaluate
    for i in range(max_iter):
        grad_np, loss_np, thetas_np = np_logreg.optimize(alpha)
        grad_ng, loss_ng, thetas_ng = train_eval_func(xs, ys, alpha)
        assert ng.testing.allclose(loss_np, loss_ng)
        assert ng.testing.allclose(grad_np, grad_ng)
        assert ng.testing.allclose(thetas_np, thetas_ng)

    transformer.close()
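
NumpyLogreg itself is not shown above. Assuming it runs plain gradient descent on the same negative log-likelihood, one optimize step reduces to the classic logistic-regression gradient xs @ (sigmoid(thetas . xs) - ys). A hypothetical reconstruction matching the (C, N) layout and the return signature used in the loop above:

import numpy as np

class NumpyLogreg(object):
    def __init__(self, xs, ys, thetas):
        self.xs, self.ys = xs, ys      # xs: (C, N), ys: (N,)
        self.thetas = thetas.copy()    # thetas: (C,)

    def optimize(self, alpha):
        # forward pass: predicted probability for each of the N samples
        ys_pred = 1.0 / (1.0 + np.exp(-self.thetas.dot(self.xs)))
        loss = -np.sum(self.ys * np.log(ys_pred) +
                       (1 - self.ys) * np.log(1 - ys_pred))
        # d(loss)/d(thetas) = xs @ (ys_pred - ys)
        grad = self.xs.dot(ys_pred - self.ys)
        self.thetas -= alpha * grad
        return grad, loss, self.thetas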
Example #15
def test_cross_entropy_binary(input_tensor):
    """TODO."""
    p_u = input_tensor
    p_v = ng.placeholder(p_u.axes)

    u = rng.uniform(-3.0, 3.0, p_u.axes)
    v = rng.uniform(-3.0, 3.0, p_u.axes)

    delta = .001

    y = ng.sigmoid(p_u)
    t = ng.softmax(p_v)
    val_u = ng.cross_entropy_binary_inner(y, t)

    with ExecutorFactory() as ex:
        dval_u_num_fun = ex.numeric_derivative(val_u, p_u, delta, p_v)
        dval_u_graph_fun = ex.derivative(val_u, p_u, p_v)

        dval_u_num = dval_u_num_fun(u, v)
        dval_u_graph = dval_u_graph_fun(u, v)
        ng.testing.assert_allclose(dval_u_graph, dval_u_num, atol=1e-2, rtol=1e-2)
Example #16
def Sigmoid(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    return ng.sigmoid(ng_inputs[0])
Example #17
def test_convolution(transformer_factory):
    """
    Test the convolution forward, backward, and update paths against the neon reference.
    """
    N = 128
    C, K = 3, 8
    D, T = 1, 1
    H = W = 32
    R = S = 2

    padding = dict(pad_d=0, pad_h=0, pad_w=0)
    strides = dict(str_d=1, str_h=1, str_w=1)
    conv_params = padding.copy()
    conv_params.update(strides)

    ax_i = ng.make_axes([ax.C, ax.D, ax.H, ax.W, ax.N])
    ax_f = ng.make_axes([ax.C, ax.T, ax.R, ax.S, ax.K])
    ax_i.set_shape((C, D, H, W, N))
    ax_f.set_shape((C, T, R, S, K))
    ax_o = ng.make_axes([
        ng.make_axis(ax_f.role_axes(ar.Channelout)[0].length,
                     name='C',
                     roles=[ar.Channel]),
        spatial_axis(ax_i,
                     ax_f,
                     padding['pad_d'],
                     strides['str_d'],
                     role=ar.Depth),
        spatial_axis(ax_i,
                     ax_f,
                     padding['pad_h'],
                     strides['str_h'],
                     role=ar.Height),
        spatial_axis(ax_i,
                     ax_f,
                     padding['pad_w'],
                     strides['str_w'],
                     role=ar.Width), ax.N
    ])

    inputs = ng.placeholder(axes=ax_i)
    filters = ng.placeholder(axes=ax_f)

    # randomly initialize
    input_value = rng.uniform(-1, 1, ax_i)
    filter_value = rng.uniform(-1, 1, ax_f)

    assert input_value.shape == ax_i.lengths
    assert filter_value.shape == ax_f.lengths

    output = ng.convolution(conv_params, inputs, filters, axes=ax_o)
    targets = ng.placeholder(axes=output.axes)

    costs = ng.cross_entropy_binary(ng.sigmoid(output), targets)
    error = ng.sum(costs, out_axes=()) / ng.batch_size(costs)
    d_inputs = ng.deriv(error, inputs)
    d_filters = ng.deriv(error, filters)

    targets_value = rng.uniform(.1, 0.9, output.axes)

    conv_executor = executor([output, error, d_inputs, d_filters], inputs,
                             filters, targets)
    result_ng, err_ng, gradI_ng, gradF_ng = conv_executor(
        input_value, filter_value, targets_value)

    # Now compute reference values via NEON
    NervanaObject.be.bsz = N
    neon_layer = Convolution(fshape=(R, S, K),
                             padding=padding,
                             strides=strides)

    inp = neon_layer.be.array(input_value.reshape(C * H * W * D, N))
    neon_layer.W = neon_layer.be.array(filter_value.reshape(C * R * S * T, K))
    neon_layer.dW = neon_layer.be.empty_like(neon_layer.W)
    neon_layer.configure((C, H, W))
    neon_layer.prev_layer = True
    neon_layer.allocate()
    neon_layer.set_deltas(DummyDeltaBuffers())

    result_ne = neon_layer.fprop(inp).get().reshape(output.axes.lengths)

    act_result_ne = 1. / (1.0 + np.exp(-result_ne))
    err = neon_layer.be.array(
        (act_result_ne - targets_value).reshape(-1, N) / float(N))
    gradI_ne = neon_layer.bprop(err).get().reshape(ax_i.lengths)
    gradF_ne = neon_layer.dW.get().reshape(ax_f.lengths)

    # Compare fprop
    np.testing.assert_allclose(result_ng, result_ne, rtol=0, atol=1e-6)

    # Compare bprop
    np.testing.assert_allclose(gradI_ng, gradI_ne, rtol=0, atol=1e-6)

    # Compare update
    np.testing.assert_allclose(gradF_ng, gradF_ne, rtol=0, atol=1e-4)
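
The spatial_axis helper used above (and the output_dim helper in the pooling test below) presumably implements the standard output-size formula for a window of length S sliding over an input of length X with padding P and stride str: floor((X - S + 2P) / str) + 1. As a sketch:

def output_dim(X, S, padding, stride):
    # standard convolution/pooling output size along one spatial dimension
    return (X - S + 2 * padding) // stride + 1

# e.g. the H and W axes above: 32-wide input, 2-wide window, no padding, stride 1
assert output_dim(32, 2, 0, 1) == 31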
Example #18
def test_pooling():
    """
    Test the pooling forward and backward paths against the neon reference.
    """
    N = 128
    C = 3
    D = 1
    H = W = 32

    J = T = 1
    R = S = 2
    ngt.make_transformer()

    padding = dict(pad_d=0, pad_h=0, pad_w=0, pad_c=0)
    strides = dict(str_d=1, str_h=1, str_w=1, str_c=1)
    fshape = dict(J=J, T=T, R=R, S=S)

    pool_params = dict(op='max')
    pool_params.update(padding)
    pool_params.update(strides)
    pool_params.update(fshape)

    ax_i = ng.make_axes([ax.C, ax.D, ax.H, ax.W, ax.N])
    ax_i.set_shape((C, D, H, W, N))
    inputs = ng.placeholder(axes=ax_i)

    ax_o = ng.make_axes([
        ng.make_axis(roles=[ar.features_input]).named('C'),
        ng.make_axis(roles=[ar.features_0]).named('D'),
        ng.make_axis(roles=[ar.features_1]).named('H'),
        ng.make_axis(roles=[ar.features_2]).named('W'), ax.N
    ])

    ax_o[:-1].set_shape((output_dim(C, J, padding['pad_c'], strides['str_c']),
                         output_dim(D, T, padding['pad_d'], strides['str_d']),
                         output_dim(H, R, padding['pad_h'], strides['str_h']),
                         output_dim(W, S, padding['pad_w'], strides['str_w'])))
    # randomly initialize
    input_value = rng.uniform(-1, 1, ax_i)

    assert input_value.shape == ax_i.lengths

    # compute pooling with graph
    output = ng.pooling(pool_params, inputs, axes=ax_o)
    targets = ng.placeholder(axes=ax_o)

    costs = ng.cross_entropy_binary(ng.sigmoid(output), targets)
    error = ng.sum(costs, out_axes=()) / ng.batch_size(costs)
    d_inputs = ng.deriv(error, inputs)

    targets_value = rng.uniform(.1, 0.9, output.axes)

    with executor([output, error, d_inputs], inputs, targets) as conv_executor:
        result_ng, err_ng, gradI_ng = conv_executor(input_value, targets_value)

    # Now compute reference values via NEON
    NervanaObject.be.bsz = N
    neon_layer = Pooling(fshape=fshape,
                         padding=padding,
                         strides=strides,
                         op="max")

    inp = neon_layer.be.array(input_value.reshape(C * H * W * D, N))
    neon_layer.configure((C, H, W))
    neon_layer.prev_layer = True
    neon_layer.allocate()
    neon_layer.set_deltas(DummyDeltaBuffers())

    result_ne = neon_layer.fprop(inp).get().reshape(output.axes.lengths)

    act_result_ne = 1. / (1.0 + np.exp(-result_ne))
    err = neon_layer.be.array(
        (act_result_ne - targets_value).reshape(-1, N) / float(N))
    gradI_ne = neon_layer.bprop(err).get().reshape(ax_i.lengths)

    # Compare fprop
    ng.testing.assert_allclose(result_ng, result_ne, rtol=0, atol=1e-6)

    # Compare bprop
    ng.testing.assert_allclose(gradI_ng, gradI_ne, rtol=0, atol=1e-6)
Example #19
def sigmoid(x, name=None):
    return ng.sigmoid(x).named(name)