Example #1
def main():
    np.random.seed(1)
    Xtrain, ytrain, params_true, true_fun, ttl = make_data_linreg_1d(21, 'linear')
    
    model = LinregModel(1, True)
    params_init = model.init_params()
    print(model)
    
    # Check that OLS and BFGS give same result
    params_ols, loss_ols = model.ols_fit(Xtrain, ytrain)
    obj_fun = lambda params: model.objective(params, Xtrain, ytrain)
    grad_fun = lambda params: model.gradient(params, Xtrain, ytrain)
    params_bfgs, loss_bfgs = bfgs(obj_fun, grad_fun, params_init) 
    assert(np.allclose(params_bfgs, params_ols))
    assert(np.allclose(loss_bfgs, loss_ols))

    # Check that analytic gradient and automatic gradient give same result
    # when evaluated on training data
    grad_fun = autograd.grad(obj_fun)
    grad_auto = grad_fun(params_init)
    grad_finite_diff = autograd.util.nd(lambda p: obj_fun(p), params_init)[0]
    grad_analytic = model.gradient(params_init, Xtrain, ytrain)
    assert(np.allclose(grad_auto, grad_finite_diff))
    assert(np.allclose(grad_auto, grad_analytic))

    params_autograd, loss_autograd = bfgs(obj_fun, grad_fun, params_init) 
    assert(np.allclose(params_bfgs, params_autograd))
    assert(np.allclose(loss_bfgs, loss_autograd))
    
    print "All assertions passed"
Example #2
def main():
    np.random.seed(1)
    Xtrain, ytrain, params_true, true_fun, ttl = make_data_linreg_1d(
        21, 'linear')

    model = LinregModel(1, True)
    params_init = model.init_params()
    print(model)

    # Check that OLS and BFGS give same result
    params_ols, loss_ols = model.ols_fit(Xtrain, ytrain)
    obj_fun = lambda params: model.objective(params, Xtrain, ytrain)
    grad_fun = lambda params: model.gradient(params, Xtrain, ytrain)
    params_bfgs, loss_bfgs = bfgs(obj_fun, grad_fun, params_init)
    assert (np.allclose(params_bfgs, params_ols))
    assert (np.allclose(loss_bfgs, loss_ols))

    # Check that analytic gradient and automatic gradient give same result
    # when evaluated on training data
    grad_fun = autograd.grad(obj_fun)
    grad_auto = grad_fun(params_init)
    grad_finite_diff = autograd.util.nd(lambda p: obj_fun(p), params_init)[0]
    grad_analytic = model.gradient(params_init, Xtrain, ytrain)
    assert (np.allclose(grad_auto, grad_finite_diff))
    assert (np.allclose(grad_auto, grad_analytic))

    params_autograd, loss_autograd = bfgs(obj_fun, grad_fun, params_init)
    assert (np.allclose(params_bfgs, params_autograd))
    assert (np.allclose(loss_bfgs, loss_autograd))

    print("All assertions passed")
Example #3
def main():
    np.random.seed(1)
    xtrain, ytrain, params_true = make_data_linreg_1d()
    N = xtrain.shape[0]
    D = 2
    Xtrain = np.c_[np.ones(N), xtrain]  # add column of 1s

    params_init = np.zeros(D)
    logger = MinimizeLogger(LinregModel.objective, (Xtrain, ytrain),
                            print_freq=10)

    # Check that OLS and BFGS give same result
    params_ols, loss_ols = LinregModel.ols_fit(Xtrain, ytrain)
    obj_fun = LinregModel.objective
    grad_fun = LinregModel.gradient
    params_bfgs, loss_bfgs, logger = bfgs_fit(params_init, obj_fun, grad_fun,
                                              (Xtrain, ytrain), logger)
    assert (np.allclose(params_bfgs, params_ols))
    assert (np.allclose(loss_bfgs, loss_ols))

    # Check that analytic gradient and automatic gradient give same result
    grad_fun = autograd.grad(obj_fun)
    grad_auto = grad_fun(params_init, Xtrain, ytrain)
    grad_finite_diff = autograd.util.nd(lambda p: obj_fun(p, Xtrain, ytrain),
                                        params_init)[0]
    grad_analytic = LinregModel.gradient(params_init, Xtrain, ytrain)
    assert (np.allclose(grad_auto, grad_finite_diff))
    assert (np.allclose(grad_auto, grad_analytic))

    params_autograd, loss_autograd = bfgs_fit(params_init, obj_fun, grad_fun,
                                              (Xtrain, ytrain))
    assert (np.allclose(params_bfgs, params_autograd))
    assert (np.allclose(loss_bfgs, loss_autograd))

    print "All assertions passed"

    # Plot loss vs time
    print(logger.obj_trace)
    ax = plot_loss_trace(logger.obj_trace, loss_ols)
    ax.set_title('BFGS')

    # Plot 2d trajectory of parameter values over time
    loss_fun = lambda w0, w1: LinregModel.objective([w0, w1], Xtrain, ytrain)
    ax = plot_error_surface(loss_fun, params_true)
    plot_param_trace(logger.param_trace, ax)
    ax.set_title('BFGS')
    plt.show()
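Examples #3 and #4 additionally assume MinimizeLogger and bfgs_fit. A minimal sketch, assuming bfgs_fit wraps scipy.optimize.minimize and records the trace through its per-iteration callback; the attribute names obj_trace and param_trace are taken from the call sites, everything else is guesswork.

import numpy as np
from scipy.optimize import minimize

class MinimizeLogger(object):
    # Records the objective and parameter values once per iteration.
    def __init__(self, obj_fun, args, print_freq=10):
        self.obj_fun = obj_fun
        self.args = args
        self.print_freq = print_freq
        self.obj_trace = []
        self.param_trace = []
        self.iter = 0

    def update(self, params):
        obj = self.obj_fun(params, *self.args)
        self.obj_trace.append(obj)
        self.param_trace.append(np.copy(params))
        if self.iter % self.print_freq == 0:
            print('iteration {}, objective {:0.4f}'.format(self.iter, obj))
        self.iter += 1

def bfgs_fit(params_init, obj_fun, grad_fun, args, logger=None):
    # BFGS via scipy; the optional logger hooks into the callback,
    # which scipy invokes with the current parameters after each step.
    callback = logger.update if logger is not None else None
    result = minimize(obj_fun, params_init, args=args, jac=grad_fun,
                      method='BFGS', callback=callback)
    if logger is not None:
        return result.x, result.fun, logger
    return result.x, result.fun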
Example #4
def main():
    np.random.seed(1)
    xtrain, ytrain, params_true = make_data_linreg_1d()
    N = xtrain.shape[0]
    D = 2
    Xtrain = np.c_[np.ones(N), xtrain] # add column of 1s
    
    params_init = np.zeros(D)
    logger = MinimizeLogger(LinregModel.objective, (Xtrain, ytrain), print_freq=10)
    
    # Check that OLS and BFGS give same result
    params_ols, loss_ols = LinregModel.ols_fit(Xtrain, ytrain)
    obj_fun = LinregModel.objective
    grad_fun = LinregModel.gradient
    params_bfgs, loss_bfgs, logger = bfgs_fit(params_init, obj_fun, grad_fun, (Xtrain, ytrain), logger) 
    assert(np.allclose(params_bfgs, params_ols))
    assert(np.allclose(loss_bfgs, loss_ols))

    # Check that analytic gradient and automatic gradient give same result
    grad_fun = autograd.grad(obj_fun)
    grad_auto = grad_fun(params_init, Xtrain, ytrain)
    grad_finite_diff = autograd.util.nd(lambda p: obj_fun(p, Xtrain, ytrain), params_init)[0]
    grad_analytic = LinregModel.gradient(params_init, Xtrain, ytrain)
    assert(np.allclose(grad_auto, grad_finite_diff))
    assert(np.allclose(grad_auto, grad_analytic))

    params_autograd, loss_autograd = bfgs_fit(params_init, obj_fun, grad_fun, (Xtrain, ytrain)) 
    assert(np.allclose(params_bfgs, params_autograd))
    assert(np.allclose(loss_bfgs, loss_autograd))
    
    print "All assertions passed"

    # Plot loss vs time
    print(logger.obj_trace)
    ax = plot_loss_trace(logger.obj_trace, loss_ols) 
    ax.set_title('BFGS')
    
    # Plot 2d trajectory of parameter values over time
    loss_fun = lambda w0, w1: LinregModel.objective([w0, w1], Xtrain, ytrain)
    ax = plot_error_surface(loss_fun, params_true)
    plot_param_trace(logger.param_trace, ax)
    ax.set_title('BFGS')
    plt.show()
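The plotting helpers plot_loss_trace, plot_error_surface, and plot_param_trace are not shown either. A matplotlib sketch consistent with how they are called; grid extent, contour count, and styling are arbitrary choices.

import numpy as np
import matplotlib.pyplot as plt

def plot_loss_trace(obj_trace, loss_min=None, ax=None):
    # Loss per iteration, with an optional reference line at the optimum.
    if ax is None:
        _, ax = plt.subplots()
    ax.plot(obj_trace, 'o-')
    if loss_min is not None:
        ax.axhline(loss_min, color='r', linestyle='--')
    ax.set_xlabel('iteration')
    ax.set_ylabel('loss')
    return ax

def plot_error_surface(loss_fun, params_true, scale=2.0, npts=50, ax=None):
    # Contours of the loss on a grid centred at the true parameters.
    w0 = np.linspace(params_true[0] - scale, params_true[0] + scale, npts)
    w1 = np.linspace(params_true[1] - scale, params_true[1] + scale, npts)
    W0, W1 = np.meshgrid(w0, w1)
    Z = np.vectorize(loss_fun)(W0, W1)
    if ax is None:
        _, ax = plt.subplots()
    ax.contour(W0, W1, Z, 30)
    ax.set_xlabel('w0')
    ax.set_ylabel('w1')
    return ax

def plot_param_trace(param_trace, ax):
    # Overlay the optimizer's path in parameter space.
    W = np.asarray(param_trace)
    ax.plot(W[:, 0], W[:, 1], 'o-', color='k')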