Example #1
def test_cross_entropy_loss_basic():
    """
    Some simple tests of cross_entropy_loss to get you started.
    Warning: these are not exhaustive.
    """
    y = np.array([[0, 1], [1, 0], [1, 0]])
    yhat = np.array([[.5, .5], [.5, .5], [.5, .5]])

    test1 = cross_entropy_loss(tf.constant(y, dtype=tf.int32),
                               tf.constant(yhat, dtype=tf.float32))
    with tf.Session() as sess:
        test1 = sess.run(test1)
    expected = -3 * np.log(.5)
    test_all_close("Cross-entropy test 1", test1, expected)

    print "Basic (non-exhaustive) cross-entropy tests pass"
Example #2
def test_cross_entropy_loss_basic():
    """
    Some simple tests of cross_entropy_loss to get you started.
    Warning: these are not exhaustive.
    """
    y = np.array([[0, 1], [1, 0], [1, 0]])
    yhat = np.array([[.5, .5], [.5, .5], [.5, .5]])

    test1 = cross_entropy_loss(
            tf.constant(y, dtype=tf.int32),
            tf.constant(yhat, dtype=tf.float32))
    with tf.Session() as sess:
        test1 = sess.run(test1)
    expected = -3 * np.log(.5)
    test_all_close("Cross-entropy test 1", test1, expected)

    print "Basic (non-exhaustive) cross-entropy tests pass"
Example #3
def test_softmax_basic():
    """
    Some simple tests of softmax to get you started. Warning: these are not exhaustive.
    """

    test1 = softmax(tf.constant(np.array([[1001, 1002], [3, 4]]), dtype=tf.float32))
    with tf.Session() as sess:
        test1 = sess.run(test1)
    test_all_close("Softmax test 1", test1, np.array([[0.26894142, 0.73105858],
                                                      [0.26894142, 0.73105858]]))

    test2 = softmax(tf.constant(np.array([[-1001, -1002]]), dtype=tf.float32))
    with tf.Session() as sess:
        test2 = sess.run(test2)
    test_all_close("Softmax test 2", test2, np.array([[0.73105858, 0.26894142]]))

    print "Basic (non-exhaustive) softmax tests pass\n"
Example #4
def test_softmax_basic():
    """
    Some simple tests of softmax to get you started.
    Warning: these are not exhaustive.
    """

    test1 = softmax(tf.constant(np.array([[1001, 1002], [3, 4]]), dtype=tf.float32))
    with tf.Session() as sess:
        test1 = sess.run(test1)
    test_all_close("Softmax test 1", test1, np.array([[0.26894142,  0.73105858],
                                                      [0.26894142,  0.73105858]]))

    test2 = softmax(tf.constant(np.array([[-1001, -1002]]), dtype=tf.float32))
    with tf.Session() as sess:
        test2 = sess.run(test2)
    test_all_close("Softmax test 2", test2, np.array([[0.73105858, 0.26894142]]))

    print "Basic (non-exhaustive) softmax tests pass\n"
Example #5
def test_softmax_basic():
    """
    Some simple tests of softmax to get you started.
    Warning: these are not exhaustive.
    """

    test1 = softmax(t.tensor(np.array([[1001, 1002], [3, 4]]),
                             dtype=t.float32))
    test_all_close(
        "Softmax test 1", test1,
        np.array([[0.26894142, 0.73105858], [0.26894142, 0.73105858]]))

    test2 = softmax(t.tensor(np.array([[-1001, -1002]]), dtype=t.float32))

    test_all_close("Softmax test 2", test2, np.array([[0.73105858,
                                                       0.26894142]]))

    print("Basic (non-exhaustive) softmax tests pass\n")
Example #6
def test_cross_entropy_loss_basic():
    """
    Some simple tests of cross_entropy_loss to get you started.
    Warning: these are not exhaustive.
    """
    y = np.array([[0, 1], [1, 0], [1, 0]])
    yhat = np.array([[.5, .5], [.5, .5], [.5, .5]])

    # test1 = cross_entropy_loss(
    #         torch.Tensor([[0, 1], [1, 0], [1, 0]]),
    #         torch.Tensor([[.5, .5], [.5, .5], [.5, .5]]))
    # test1 = np.array(test1)
    test1 = cross_entropy_loss(
            dy.inputTensor([[0, 1], [1, 0], [1, 0]]),
            dy.inputTensor([[.5, .5], [.5, .5], [.5, .5]]))
    test1 = np.array(test1.value())
    expected = -3 * np.log(.5)
    test_all_close("Cross-entropy test 1", test1, expected)

    print "Basic (non-exhaustive) cross-entropy tests pass"
Example #7
def test_softmax_basic():
    """
    Some simple tests of softmax to get you started.
    Warning: these are not exhaustive.
    """

    # test1 = softmax(torch.Tensor([[1001, 1002], [3, 4]]))
    # test1 = test1.numpy()
    test1 = softmax(dy.inputTensor([[1001, 1002], [3, 4]]))
    test1 = test1.npvalue()
    test_all_close("Softmax test 1", test1, np.array([[0.26894142,  0.73105858],
                                                      [0.26894142,  0.73105858]]))

    # test2 = softmax(torch.Tensor([[-1001, -1002]]))
    # test2 = test2.numpy()
    test2 = softmax(dy.inputTensor([[-1001, -1002]]))
    test2 = test2.npvalue()
    test_all_close("Softmax test 2", test2, np.array([[0.73105858, 0.26894142]]))

    print "Basic (non-exhaustive) softmax tests pass\n"
Example #8
def test_cross_entropy_loss_basic():
    """
    Some simple tests of cross_entropy_loss to get you started.
    Warning: these are not exhaustive.
    """
    dy.renew_cg()
    #y = np.array([[0, 1], [1, 0], [1, 0]])
    #yhat = np.array([[.5, .5], [.5, .5], [.5, .5]])
    y = np.array([[0, 1], [1, 0], [1, 0]], dtype=np.float32)
    yhat = np.array([[.5, .5], [.5, .5], [.5, .5]], dtype=np.float32)

    test1 = cross_entropy_loss(dy.inputTensor(y), dy.inputTensor(yhat))
    #tf.constant(y, dtype=tf.int32),
    #tf.constant(yhat, dtype=tf.float32))
    #with tf.Session() as sess:
    #    test1 = sess.run(test1)
    expected = -3 * np.log(.5)
    test_all_close("Cross-entropy test 1",
                   test1.npvalue().reshape([]), expected)

    print "Basic (non-exhaustive) cross-entropy tests pass"
Example #9
def test_softmax_basic():
    """
    Some simple tests of softmax to get you started.
    Warning: these are not exhaustive.
    """

    test1 = softmax(
        tf.constant(np.array([[1001, 1002], [3, 4]]), dtype=tf.float32))
    with tf.Session() as sess:
        test1 = sess.run(test1)
    test_all_close(
        "Softmax test 1", test1,
        np.array([[0.26894142, 0.73105858], [0.26894142, 0.73105858]]))

    test2 = softmax(tf.constant(np.array([[-1001, -1002]]), dtype=tf.float32))
    with tf.Session() as sess:
        test2 = sess.run(test2)
    test_all_close("Softmax test 2", test2, np.array([[0.73105858,
                                                       0.26894142]]))

    # [CL] added a few more tests to be more exhaustive
    test3 = softmax(tf.constant(np.array([[23, -2, 30],
                                          [3, 4, 2],
                                          [-10, -2, -8],
                                          [1, 20, -4]]), dtype=tf.float32))
    with tf.Session() as sess:
        test3 = sess.run(test3)
    test_all_close(
        "Softmax test 3", test3,
        np.array([[0.000911051, 1.26526e-14, 0.999088949],
                  [0.244728471, 0.665240956, 0.090030573],
                  [3.3452123e-04, 0.99719369, 2.4717962e-03],
                  [5.6028e-09, 0.999999994, 3.77513e-11]]))

    print "Basic (non-exhaustive) softmax tests pass\n"
#     test_all_close("Softmax test 2", test2, np.array([[0.73105858, 0.26894142]]))
#
#     print "Basic (non-exhaustive) softmax tests pass\n"

# def test_cross_entropy_loss_basic():
#     """
#     Some simple tests of cross_entropy_loss to get you started.
#     Warning: these are not exhaustive.
#     """
#     y = np.array([[0, 1], [1, 0], [1, 0]])
#     yhat = np.array([[.5, .5], [.5, .5], [.5, .5]])
#
#     test1 = cross_entropy_loss(tf.constant(y, dtype=tf.int32), tf.constant(yhat, dtype=tf.float32))
#     with tf.Session() as sess:
#         test1 = sess.run(test1)
#     expected = -3 * np.log(.5)
#     test_all_close("Cross-entropy test 1", test1, expected)
#
#     print "Basic (non-exhaustive) cross-entropy tests pass"

if __name__ == "__main__":
    y = np.array([[0, 1], [1, 0], [1, 0]])
    yhat = np.array([[.5, .5], [.5, .5], [.5, .5]])

    test1 = cross_entropy_loss(tf.constant(y, dtype=tf.int32),
                               tf.constant(yhat, dtype=tf.float32))
    with tf.Session() as sess:
        test1 = sess.run(test1)
    expected = -3 * np.log(.5)
    test_all_close("Cross-entropy test 1", test1, expected)