Example #1
import numpy

import infpy

def gp_ex_fig_22():
    """Similar to figure 2.2 in `Gaussian Processes for Machine Learning`__ by Rasmussen and Williams. 

    __ http://www.amazon.co.uk/Gaussian-Processes-Learning-Adaptive-Computation/dp/026218253X/
    """
    X = [
            [ -4.0 ],
            [ -3.0 ],
            [ -1.0 ],
            [  0.0 ],
            [  2.0 ],
    ]
    y = numpy.array( [
            -2.0,
            0.0,
            1.0,
            2.0,
            -1.0
    ] )
    LN = infpy.LogNormalDistribution
    Gamma = infpy.GammaDistribution
    # kernel: observation noise + constant amplitude * squared exponential,
    # with priors on the hyperparameters
    k = (
            infpy.noise_kernel( .2, Gamma( 1.0, 1.0 ) )
            + infpy.ConstantKernel( )
            * infpy.SquaredExponentialKernel( [ 1.0 ], [ LN() ] )
    )
    gp = infpy.GaussianProcess( X, y, k )
    # un-comment to plot the prediction under the initial hyperparameters
    #infpy.gp_1D_predict( gp, 100, x_min = -5.0, x_max = 5.0 )
    # maximise the marginal likelihood w.r.t. the kernel hyperparameters
    infpy.gp_learn_hyperparameters( gp )
    # plot the posterior prediction over [-5, 5]
    infpy.gp_1D_predict( gp, 100, x_min = -5.0, x_max = 5.0 )
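Figure 2.2 in Rasmussen and Williams contrasts the fit before and after hyperparameter learning; un-commenting the first gp_1D_predict call above reproduces the "before" panel. To inspect what the optimiser settled on, the gp.k.params and gp.LL attributes used in Example #4 below can be printed; the helper here is a hypothetical wrapper, not part of infpy, so treat it as a minimal sketch:

def report_learnt_hyperparameters( gp ):
    """Print the learnt kernel hyperparameters and the log marginal likelihood."""
    print 'Learnt parameters: %s' % str( gp.k.params )
    print 'Log likelihood: %f' % gp.LL

# usage, after infpy.gp_learn_hyperparameters( gp ) has run:
#   report_learnt_hyperparameters( gp )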
Example #2
import math

import numpy
import scipy.stats

import infpy

def gp_ex_output_scale():
    """Examine how kernel parameters should change for scaled outputs"""
    start, end = 0.0, 1.0
    X = infpy.gp_1D_X_range( start, end, 0.04 )

    noise_level = 0.1
    # noisy samples of x**2, centred to have zero mean
    small_y = infpy.gp_zero_mean(
            numpy.asarray(
                    [ x[0]**2 + noise_level * scipy.stats.norm().rvs()[0] for x in X ]
            )
    )
    # big_y = small_y * 100.0  (see the sketch after this example for the fit to the scaled outputs)

    LN = infpy.LogNormalDistribution
    k = (
            infpy.noise_kernel( noise_level, LN( math.log( math.sqrt( noise_level ) ) ) )
            + infpy.ConstantSquaredKernel( 1.0, LN( math.log( 1.0 ) ) )
            * infpy.SquaredExponentialKernel( [ 1.0 ], [ LN() ] )
    )
    gp = infpy.GaussianProcess( X, small_y, k )

    # prediction under the initial hyperparameters
    infpy.gp_1D_predict( gp, 100 )

    # prediction after maximising the marginal likelihood
    infpy.gp_learn_hyperparameters( gp )
    infpy.gp_1D_predict( gp, 100 )
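The commented-out big_y line above hints at the intended comparison: fit the same kernel form to the outputs scaled by 100 and see how the learnt amplitude and noise parameters change. A minimal sketch of that second fit, reusing only the calls already present in this example (the comparison function itself is my addition, not part of infpy):

def gp_ex_output_scale_big():
    """Fit the same kernel form to outputs scaled by 100 and report the learnt parameters."""
    import math
    import numpy
    import scipy.stats
    import infpy
    X = infpy.gp_1D_X_range( 0.0, 1.0, 0.04 )
    noise_level = 0.1
    small_y = infpy.gp_zero_mean(
            numpy.asarray(
                    [ x[0]**2 + noise_level * scipy.stats.norm().rvs()[0] for x in X ]
            )
    )
    big_y = small_y * 100.0  # the scaled outputs from the commented-out line above
    LN = infpy.LogNormalDistribution
    k = (
            infpy.noise_kernel( noise_level, LN( math.log( math.sqrt( noise_level ) ) ) )
            + infpy.ConstantSquaredKernel( 1.0, LN( math.log( 1.0 ) ) )
            * infpy.SquaredExponentialKernel( [ 1.0 ], [ LN() ] )
    )
    gp = infpy.GaussianProcess( X, big_y, k )
    infpy.gp_learn_hyperparameters( gp )
    print 'Learnt parameters (scaled outputs):', gp.k.params
    infpy.gp_1D_predict( gp, 100 )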
Example #3
import math

import numpy

import infpy

def gp_ex_fixed_period_1():
    """Example of the fixed period kernel"""
    start, end = 0.0, 4.0
    X = infpy.gp_1D_X_range( start, end, 0.4 )
    y = numpy.asarray( [ math.sin( 2.0 * math.pi * x[0] ) for x in X ] )
    # pylab.plot( [ x[0] for x in X ], [ y1 for y1 in y ] )
    # pylab.show()
    # return
    LN = infpy.LogNormalDistribution
    Gamma = infpy.GammaDistribution
    # noise + constant amplitude * fixed-period kernel; the period (1.0) matches the sin(2*pi*x) data
    k = (
            infpy.noise_kernel( 0.1, LN(  ) )
            + infpy.ConstantKernel( 1.0, Gamma(  ) )
            * infpy.FixedPeriod1DKernel( 1.0 )
    )
    gp = infpy.GaussianProcess( X, y, k )
    infpy.gp_learn_hyperparameters( gp )
    infpy.gp_1D_predict( gp )
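As the name suggests, FixedPeriod1DKernel takes the period as a fixed constructor argument rather than a learnable hyperparameter, so the quality of the fit depends on choosing it correctly; above, 1.0 matches the sin(2*pi*x) training data. A minimal sketch of the same fit with a deliberately mismatched period, to see the effect (the comparison is my addition; only calls from the example above are used, and 0.7 is an arbitrary wrong value):

def gp_ex_fixed_period_mismatch():
    """Fit the sin(2*pi*x) data with a deliberately wrong fixed period."""
    import math
    import numpy
    import infpy
    X = infpy.gp_1D_X_range( 0.0, 4.0, 0.4 )
    y = numpy.asarray( [ math.sin( 2.0 * math.pi * x[0] ) for x in X ] )
    k = (
            infpy.noise_kernel( 0.1, infpy.LogNormalDistribution() )
            + infpy.ConstantKernel( 1.0, infpy.GammaDistribution() )
            * infpy.FixedPeriod1DKernel( 0.7 )  # true period is 1.0
    )
    gp = infpy.GaussianProcess( X, y, k )
    infpy.gp_learn_hyperparameters( gp )
    infpy.gp_1D_predict( gp )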
Example #4
    # (fragment of a longer example: predict_values, gp, training_points, test_x,
    # f_star_mean and V_f_star are defined earlier in the original script)
    if display:
        from pylab import figure, plot, show, fill, title
        figure()
        infpy.gp_plot_prediction( test_x, f_star_mean, V_f_star )
        plot(
                [ x[0] for (x, v) in training_points ],
                [ v for (x, v) in training_points ],
                'rs' )
        infpy.gp_title_and_show( gp )


#
# Learn hyperparameters
#
predict_values( display = True )
#sys.exit()
# print 'Params:', gp.k.get_parameters()
for initial_guess in [
#       [ 10.0 ],
#       [ 0.01 ],
        [ 0.01, 10.0 ],
        [ 0.01, 0.01 ],
        None,
]:
    print 'Learning from initial parameters:', initial_guess
    infpy.gp_learn_hyperparameters( gp, initial_guess )
    print dir(gp)  # debugging aid: list the attributes of the GP object
    print 'Learnt parameters: %s\nLL: %f' \
            % ( str( gp.k.params ), gp.LL )
    predict_values( display = True )
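Example #4 is a fragment: the definitions of predict_values, gp, training_points, test_x, f_star_mean and V_f_star live earlier in the original script. When only the GaussianProcess object is to hand, the gp_1D_predict helper used in Examples #1 to #3 performs the predict-and-plot step in one call, so the restart loop can be packaged as below (the wrapper function is my own hedged rearrangement, not infpy API):

import infpy

def learn_from_restarts( gp, initial_guesses = ( [ 0.01, 10.0 ], [ 0.01, 0.01 ], None ) ):
    """Re-learn the hyperparameters from several starting points and plot each resulting fit."""
    for initial_guess in initial_guesses:
        print 'Learning from initial parameters:', initial_guess
        infpy.gp_learn_hyperparameters( gp, initial_guess )
        print 'Learnt parameters: %s\nLL: %f' % ( str( gp.k.params ), gp.LL )
        infpy.gp_1D_predict( gp, 100 )  # predict and plot the resulting fit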