Example #1
def test_sigmoid_cross_entropy_op():
    y = bp.Variable("y")
    label = bp.Variable("label")
    loss = bp.sigmoid_cross_entropy_op(y, label)
    y_val = np.array([1.0, 2.0, 3.0])
    label_val = np.array([4.0, 5.0, 6.0])
    loss_expect = -y_val * label_val + np.log(1.0 + np.exp(y_val))
    executor = bp.Executor()
    loss_val, = executor.forward([loss],
                                 feed_dict={
                                     y: y_val,
                                     label: label_val
                                 })
    np.testing.assert_almost_equal(loss_val, loss_expect)

    y_grad, label_grad = executor.backward(loss, [y, label],
                                           feed_dict={
                                               y: y_val,
                                               label: label_val
                                           })

    y_grad_expect = -label_val + sigmoid(y_val)
    label_grad_expect = -y_val
    np.testing.assert_almost_equal(y_grad, y_grad_expect)
    np.testing.assert_almost_equal(label_grad, label_grad_expect)
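The test calls a `sigmoid` helper that the snippet does not define. A minimal sketch of what it presumably looks like, assuming a plain NumPy implementation shared by the test module:

import numpy as np

def sigmoid(x):
    # element-wise logistic function, 1 / (1 + e^-x)
    return 1.0 / (1.0 + np.exp(-x))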
Example #2
def main():
    x_val, y_val = get_data()
    x = bp.Variable("x")
    w = bp.Variable("w")
    b = bp.Variable("b")
    y = bp.Variable("y")

    y_pred = bp.matmul_op(x, w) + b
    # squared error (y - y_pred)^2, built from add and mul ops only
    loss = (y + -1 * y_pred) * (y + -1 * y_pred)

    w_val = np.random.rand(2, 1)
    b_val = np.random.rand(1, 1)

    executor = bp.Executor()

    ln = 0.001  # learning rate
    for i in range(100000):
        index = i % 100  # cycle through the 100 training examples
        if i % 20 == 0:
            print("step {}, w={}, b={}".format(i, w_val, b_val))
        w_grad, b_grad = executor.backward(loss, [w, b],
                                           feed_dict={
                                               x: x_val[index:index + 1],
                                               y: y_val[index:index + 1],
                                               w: w_val,
                                               b: b_val
                                           })
        # plain SGD update
        w_val = w_val - ln * w_grad
        b_val = b_val - ln * b_grad
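`get_data` is not included in the snippet. Judging from the loop (`index = i % 100`) and the weight shapes, it returns 100 examples with 2 features and a 1-column target. A hypothetical stand-in that satisfies those shapes:

import numpy as np

def get_data(n=100):
    # hypothetical data generator: 2 features, noisy linear target,
    # matching the (2, 1) weight and (1, 1) bias used above
    x = np.random.rand(n, 2)
    true_w = np.array([[2.0], [-3.0]])
    y = np.matmul(x, true_w) + 0.5 + 0.01 * np.random.randn(n, 1)
    return x, y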
Example #3
def lr():
    label, feature = load_libsvm("data/agaricus.txt")
    print("total example: {}".format(len(label)))

    x = bp.Variable("x")
    w = bp.Variable("w1")
    b = bp.Variable("b1")
    y = bp.Variable("y")

    y_pred = bp.matmul_op(x, w) + b

    prob = bp.sigmoid_op(y_pred)

    single_loss = bp.sigmoid_cross_entropy_op(logit=y_pred, label=y)

    w_val = np.random.rand(126, 1)
    b_val = np.random.rand(1, 1)

    ln = 0.0001  # learning rate

    executor = bp.Executor()

    for i in range(1000000):
        index = i % len(feature)
        if i % 1000 == 0:
            # evaluate loss and accuracy over the full dataset in one pass
            loss_val, prob_val = executor.forward([single_loss, prob],
                                                  feed_dict={
                                                      x: feature,
                                                      w: w_val,
                                                      b: b_val,
                                                      y: label
                                                  })
            print("step {}, loss={}, acc={}".format(i, np.mean(loss_val),
                                                    cal_acc(label, prob_val)))

        w_grad, b_grad = executor.backward(single_loss, [w, b],
                                           feed_dict={
                                               x: feature[index:index + 1],
                                               w: w_val,
                                               b: b_val,
                                               y: label[index:index + 1]
                                           })
        w_val = w_val - ln * w_grad
        b_val = b_val - ln * b_grad
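`load_libsvm` and `cal_acc` are project helpers not shown here. Hedged sketches under the assumptions visible in the snippet (126 dense features, binary labels, probabilities thresholded at 0.5); the index base of the libsvm file is an assumption and may need adjusting:

import numpy as np

def load_libsvm(path, num_features=126):
    # minimal reader for "label idx:val idx:val ..." lines
    labels, rows = [], []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if not parts:
                continue
            labels.append([float(parts[0])])
            row = np.zeros(num_features)
            for item in parts[1:]:
                idx, val = item.split(":")
                row[int(idx) - 1] = float(val)  # assumes 1-based indices
            rows.append(row)
    return np.array(labels), np.array(rows)

def cal_acc(label, prob):
    # accuracy of 0.5-thresholded probabilities against binary labels
    pred = (prob > 0.5).astype(np.float64)
    return float(np.mean(pred == label))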
Example #4
def test_mul_two_var():
    x = bp.Variable("x")
    y = bp.Variable("y")
    z = x * y
    executor = bp.Executor()
    x_val = np.ones(3) * 3
    y_val = np.ones(3) * 5
    z_val, x_val_ = executor.forward([z, x], {x: x_val, y: y_val})
    assert np.array_equal(x_val, x_val_)
    assert np.array_equal(z_val, x_val * y_val)

    x_grad, y_grad = executor.backward(z, [x, y], {x: x_val, y: y_val})
    assert np.array_equal(x_grad, y_val)
    assert np.array_equal(y_grad, x_val)
Example #5
def test_matmul_two_var():
    x = bp.Variable("x")
    y = bp.Variable("y")
    z = bp.matmul_op(x, y)

    x_val = np.array([[1, 2, 3], [4, 5, 6]])
    y_val = np.array([[7, 8, 9, 10], [11, 12, 13, 14], [15, 16, 17, 18]])
    z_val = np.matmul(x_val, y_val)

    executor = bp.Executor()
    z_result, = executor.forward([z], {x: x_val, y: y_val})
    assert np.array_equal(z_result, z_val)

    x_grad, y_grad = executor.backward(z, [x, y], {x: x_val, y: y_val})
    z_grad = np.ones_like(z_result)

    expect_x_grad = np.matmul(z_grad, np.transpose(y_val))
    expect_y_grad = np.matmul(np.transpose(x_val), z_grad)

    assert np.array_equal(x_grad, expect_x_grad)
    assert np.array_equal(y_grad, expect_y_grad)
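The expected gradients encode the standard matmul backward rule: for Z = XY with upstream gradient dZ (all ones here), dX = dZ·Yᵀ and dY = Xᵀ·dZ. A quick check in plain NumPy, independent of bp:

import numpy as np

x_val = np.array([[1, 2, 3], [4, 5, 6]])
y_val = np.array([[7, 8, 9, 10], [11, 12, 13, 14], [15, 16, 17, 18]])
dz = np.ones((2, 4))
dx = np.matmul(dz, y_val.T)  # shape (2, 3), same as x_val
dy = np.matmul(x_val.T, dz)  # shape (3, 4), same as y_val
assert dx.shape == x_val.shape and dy.shape == y_val.shape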
Example #6
def test_add_byconst_op():
    x = bp.Variable("x")
    y = 1 + x

    executor = bp.Executor()

    x_val = np.array([10, 20])
    y_val, = executor.forward([y], feed_dict={x: x_val})

    np.testing.assert_array_equal(y_val, x_val + 1)

    x_grad, = executor.backward(y, [x], feed_dict={x: x_val})
    np.testing.assert_array_equal(x_grad, np.array([1, 1]))
Example #7
def test_relu_op():
    x = bp.Variable("x")
    y = bp.relu_op(x)

    x_val = np.array([[1, -2, 3], [-1, -1, 3]])
    executor = bp.Executor()

    y_val, = executor.forward([y], {x: x_val})
    y_expect = np.array([[1, 0, 3], [0, 0, 3]])
    np.testing.assert_array_equal(y_val, y_expect)

    x_grad, = executor.backward(y, [x], {x: x_val})
    x_grad_expect = np.array([[1, 0, 1], [0, 0, 1]])
    np.testing.assert_array_equal(x_grad, x_grad_expect)
Example #8
def test_sigmoid_op():
    x = bp.Variable("x")
    y = bp.sigmoid_op(x)

    x_val = np.array([1, 2])
    executor = bp.Executor()
    y_val, = executor.forward([y], feed_dict={x: x_val})
    y_expect = np.array([1 / (1 + math.exp(-1.0)), 1 / (1 + math.exp(-2.0))])
    np.testing.assert_almost_equal(y_val, y_expect)

    x_grad, = executor.backward(y, [x], {x: x_val})
    x_grad_expect = np.array([
        math.exp(-1.0) / (1 + math.exp(-1.0)) / (1 + math.exp(-1.0)),
        math.exp(-2.0) / (1 + math.exp(-2.0)) / (1 + math.exp(-2.0))
    ])
    np.testing.assert_almost_equal(x_grad, x_grad_expect)
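The expected gradient is the sigmoid derivative written out: exp(-x) / (1 + exp(-x))^2, which equals σ(x)(1 - σ(x)). A short NumPy check of that identity:

import numpy as np

x = np.array([1.0, 2.0])
s = 1.0 / (1.0 + np.exp(-x))
np.testing.assert_almost_equal(np.exp(-x) / (1.0 + np.exp(-x)) ** 2,
                               s * (1.0 - s))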
Example #9
def test_exp_var():
    x = bp.Variable("x")
    y = bp.exp_op(x)

    x_val = np.array([1.0, 1.0])
    y_val = np.exp(x_val)

    executor = bp.Executor()

    y_result, = executor.forward([y], {x: x_val})

    assert np.array_equal(y_result, y_val)

    # d/dx exp(x) = exp(x), so the gradient equals the forward value
    x_grad, = executor.backward(y, [x], {x: x_val})
    np.testing.assert_almost_equal(x_grad, y_val)
Example #10
def mlp():
    label, feature = load_libsvm("data/agaricus.txt")

    x = bp.Variable("x")
    w1 = bp.Variable("w1")
    b1 = bp.Variable("b1")
    w2 = bp.Variable("w2")
    b2 = bp.Variable("b2")
    y = bp.Variable("y")

    h1 = bp.relu_op(bp.matmul_op(x, w1) + b1)
    y_pred = bp.matmul_op(h1, w2) + b2
    prob = bp.sigmoid_op(y_pred)
    single_loss = bp.sigmoid_cross_entropy_op(logit=y_pred, label=y)

    w1_val = np.random.rand(126, 32)
    b1_val = np.random.rand(1, 32)
    w2_val = np.random.rand(32, 1)
    b2_val = np.random.rand(1, 1)

    ln = 0.001  # learning rate, decayed during training

    executor = bp.Executor()

    for i in range(10000000):
        index = i % len(feature)
        if i % 1000 == 0:
            loss_val, prob_val = executor.forward(
                [single_loss, prob],
                feed_dict={
                    x: feature,
                    w1: w1_val,
                    b1: b1_val,
                    w2: w2_val,
                    b2: b2_val,
                    y: label
                })
            print("step {}, loss={}, acc={}, ln={}".format(
                i, np.mean(loss_val), cal_acc(label, prob_val), ln))
        if i > 0 and i % 500000 == 0:
            ln = ln / 10  # decay the learning rate every 500k steps
        w1_grad, b1_grad, w2_grad, b2_grad = executor.backward(
            single_loss, [w1, b1, w2, b2],
            feed_dict={
                x: feature[index:index + 1],
                w1: w1_val,
                b1: b1_val,
                w2: w2_val,
                b2: b2_val,
                y: label[index:index + 1]
            })
        w1_val = w1_val - ln * w1_grad
        b1_val = b1_val - ln * b1_grad
        w2_val = w2_val - ln * w2_grad
        b2_val = b2_val - ln * b2_grad