Example #1
# The autograd import paths below are assumed; adjust them to the library's actual layout.
import numpy as np

import autograd.optimizer
from autograd import Variable


def test_variable_exp():
    a_array = np.array([1, 2])
    a = Variable(np.array([1, 2], dtype=float))
    expected_result_variable = Variable(
        np.array([2.718281828459045, 7.3890560989306495]))
    expected_result_array = np.array([2.718281828459045, 7.3890560989306495])

    assert np.all(a_array == a.data)
    assert np.all(expected_result_variable == expected_result_array)

    assert id(a_array) != id(a.data)
    assert id(expected_result_variable) != id(expected_result_array)

    def validate_variable_result(result) -> None:
        assert isinstance(result, Variable)
        assert expected_result_variable.isclose(result).all()

    def validate_array_result(result) -> None:
        assert isinstance(result, np.ndarray)
        assert np.isclose(result, expected_result_array).all()

    # Variable
    validate_variable_result(np.exp(a))
    validate_variable_result(a.exp())

    # numpy
    validate_array_result(np.exp(a_array))

    # Verify the derivative: d/dx exp(x) = exp(x), so the gradient w.r.t. a should match exp(a)
    sgd = autograd.optimizer.SGD(learning_rate=1e-3)
    exp_result = a.exp()
    variable_to_gradient = sgd.take_training_step(exp_result)
    assert all(
        isinstance(var, Variable) and isinstance(grad, np.ndarray)
        for var, grad in variable_to_gradient.items())
    assert np.all(
        np.isclose(variable_to_gradient[a], exp_result.data, rtol=1e-3, atol=1e-4))

    # Verify Trainability
    x = Variable(np.random.rand(2))
    y = 1
    sgd = autograd.optimizer.SGD(learning_rate=1)
    for training_step_index in range(1_000):
        y_hat = x.exp()
        diff = np.subtract(y, y_hat)
        loss = np.sum(diff**2)
        # Stop early once the squared error has effectively converged to zero.
        if training_step_index > 10 and loss.sum() < 1e-15:
            break
        sgd.take_training_step(loss)
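
# A standalone sanity check in plain NumPy (independent of the autograd library),
# illustrating the identity the gradient check above relies on, d/dx exp(x) = exp(x),
# verified here by central finite differences at x = [1, 2].
def check_exp_derivative_with_finite_differences() -> None:
    x = np.array([1.0, 2.0])
    eps = 1e-6
    numeric_grad = (np.exp(x + eps) - np.exp(x - eps)) / (2 * eps)
    assert np.allclose(numeric_grad, np.exp(x), rtol=1e-6)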
Example #2
def softmax(x: Variable) -> Variable:
    """Exponentiate each entry, then normalize so the outputs sum to one."""
    e = x.exp()
    return e / e.sum()
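
# A minimal usage sketch, assuming Variable wraps a NumPy array exposed as .data
# (as in Example #1); the imports below would normally sit at the top of the file,
# and the autograd import path is assumed.
import numpy as np

from autograd import Variable

logits = Variable(np.array([1.0, 2.0, 3.0]))
probs = softmax(logits)
assert np.isclose(probs.data.sum(), 1.0)  # softmax outputs sum to one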