Example No. 1
@classmethod
def setUpClass(cls):
    super(TestMultiFull, cls).setUpClass()
    # Softmax likelihood with two RBF kernels.
    likelihood = likelihoods.Softmax()
    kernel = [
        kernels.RadialBasis(input_dim=2,
                            lengthscale=1.0,
                            std_dev=1.0,
                            white=0.0) for _ in range(2)
    ]
    inducing_locations = np.array([[1.0, 2.0, 3.0, 4.0]])
    cls.model = autogp.GaussianProcess(likelihood_func=likelihood,
                                       kernel_funcs=kernel,
                                       inducing_inputs=inducing_locations,
                                       num_components=2,
                                       diag_post=False,
                                       num_samples=1)
    cls.session.run(tf.global_variables_initializer())
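The setUpClass fixtures in this listing call super(...).setUpClass() and then use cls.session, so they assume a shared base test class that owns the TensorFlow session. A minimal sketch of such a fixture, assuming TF 1.x and a hypothetical class name TestBase (not shown in the excerpt):

import unittest
import tensorflow as tf


class TestBase(unittest.TestCase):
    # Hypothetical shared fixture: creates the tf.Session that the
    # derived test classes reference as cls.session.
    @classmethod
    def setUpClass(cls):
        cls.session = tf.Session()

    @classmethod
    def tearDownClass(cls):
        cls.session.close()

With a base class like this, the cls.session.run(tf.global_variables_initializer()) call at the end of each derived setUpClass initialises the model's variables in the shared session.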
Example No. 2
@classmethod
def setUpClass(cls):
    super(TestSimpleFull, cls).setUpClass()
    # Gaussian likelihood with a single RBF kernel.
    likelihood = likelihoods.Gaussian(1.0)
    kernel = [
        kernels.RadialBasis(input_dim=1,
                            lengthscale=1.0,
                            std_dev=1.0,
                            white=0.0)
    ]
    # In most of our unit tests, we will replace this value with something else.
    inducing_inputs = np.array([[1.0]])
    cls.model = autogp.GaussianProcess(likelihood_func=likelihood,
                                       kernel_funcs=kernel,
                                       inducing_inputs=inducing_inputs,
                                       num_components=1,
                                       diag_post=False,
                                       num_samples=10)
    cls.session.run(tf.global_variables_initializer())
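The comment above notes that most individual tests swap in different inducing inputs. A hedged sketch of what such a test might look like, reusing only the constructor arguments that already appear in this fixture; the test name and the five-point grid are illustrative assumptions, not part of the AutoGP test suite:

def test_with_five_inducing_points(self):
    # Hypothetical test: rebuild the model with five inducing points instead of [[1.0]].
    likelihood = likelihoods.Gaussian(1.0)
    kernel = [kernels.RadialBasis(input_dim=1, lengthscale=1.0, std_dev=1.0, white=0.0)]
    inducing_inputs = np.linspace(-2.0, 2.0, 5)[:, np.newaxis]  # shape (5, 1)
    model = autogp.GaussianProcess(likelihood_func=likelihood,
                                   kernel_funcs=kernel,
                                   inducing_inputs=inducing_inputs,
                                   num_components=1,
                                   diag_post=False,
                                   num_samples=10)
    self.session.run(tf.global_variables_initializer())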
Example No. 3
    FLAGS = util.util.get_flags()
    BATCH_SIZE = FLAGS.batch_size
    LEARNING_RATE = FLAGS.learning_rate
    DISPLAY_STEP = FLAGS.display_step
    EPOCHS = FLAGS.n_epochs
    NUM_SAMPLES = FLAGS.mc_train
    NUM_INDUCING = FLAGS.n_inducing
    IS_ARD = FLAGS.is_ard

    data, test, _ = datasets.import_mnist()

    # Setup initial values for the model.
    likelihood = likelihoods.Softmax()
    kern = [
        kernels.RadialBasis(data.X.shape[1],
                            lengthscale=10.0,
                            input_scaling=IS_ARD) for _ in range(10)
    ]
    # Alternative: kern = [kernels.ArcCosine(data.X.shape[1], 2, 3, 5.0, 1.0,
    #                                        input_scaling=True) for _ in range(10)]

    Z = init_z(data.X, NUM_INDUCING)
    m = autogp.GaussianProcess(likelihood, kern, Z, num_samples=NUM_SAMPLES)

    # Set up the loss reported during training (classification error rate).
    error_rate = losses.ZeroOneLoss(data.Dout)

    import time
    otime = time.time()
    o = tf.train.RMSPropOptimizer(LEARNING_RATE)
    start = time.time()
    m.fit(data,
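The helper init_z used here (and again in Example No. 5) is not shown in the excerpt. A minimal sketch of one common initialisation, picking a random subset of the training inputs as the starting inducing locations, is given below; the real helper may instead use something like k-means centres, so treat this as an assumption:

import numpy as np

def init_z(train_inputs, num_inducing):
    # Sketch: use a random subset of the training inputs as the initial
    # inducing point locations.
    indices = np.random.choice(train_inputs.shape[0], num_inducing, replace=False)
    return train_inputs[indices].copy()

Whatever the strategy, Z ends up as an array of NUM_INDUCING input locations that is passed to autogp.GaussianProcess as the initial inducing inputs.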
Example No. 4
# Group latent-function indices into 'row' blocks: block i collects every
# output_dim-th index starting at i, giving node_dim indices per block.
block_struct = [[] for _ in range(output_dim)]
for i in range(output_dim):
    row = list(range(i, i + output_dim * (node_dim - 1) + 1, output_dim))
    block_struct[i] = row

nodes = [[x] for x in list(
    range(output_dim * node_dim, output_dim * node_dim + output_dim))]
block_struct = block_struct + nodes

# The same link input d_link is reused for every full row block (each block
# could in principle get its own); the independent node blocks take a
# placeholder value of 1.0 instead.
link_inputs = [d_link for _ in range(output_dim)] + [
    1.0 for _ in range(output_dim)
]

# create 'between' kernel list
klink_rows = [
    kernels.CompositeKernel('mul', [
        kernels.RadialBasis(
            2, std_dev=1.0, lengthscale=1.0, white=0.01, input_scaling=IS_ARD),
        kernels.CompactSlice(
            2, active_dims=[0, 1], lengthscale=1.0, input_scaling=IS_ARD)
    ]) for i in range(output_dim)
]

klink_g = [1.0 for i in range(output_dim)]
kernlink = klink_rows + klink_g

# Create the 'within' kernel list. For this example's data we extract the lag
# features for each site, to be used in the associated node functions/blocks.
lag_active_dims_s = [[] for _ in range(output_dim)]
for i in range(output_dim):
    lag_active_dims_s[i] = list(range(lag_dim * i, lag_dim * (i + 1)))

k_rows = [
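To make the index bookkeeping in this excerpt concrete, here is a tiny worked example of the same block-structure construction with assumed values output_dim=2 and node_dim=3 (the excerpt does not fix either value): the two 'row' blocks interleave the coupled latent functions, and the trailing singleton blocks are the independent node functions.

output_dim, node_dim = 2, 3  # assumed values for illustration only

block_struct = [[] for _ in range(output_dim)]
for i in range(output_dim):
    block_struct[i] = list(range(i, i + output_dim * (node_dim - 1) + 1, output_dim))
nodes = [[x] for x in range(output_dim * node_dim, output_dim * node_dim + output_dim)]
block_struct = block_struct + nodes

print(block_struct)  # [[0, 2, 4], [1, 3, 5], [6], [7]]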
Example No. 5
Z = init_z(data.X, NUM_INDUCING)
# Set up initial values for the model.
likelihood = likelihoods.Logistic()

if KERNEL == 'arccosine':
    kern = [
        kernels.ArcCosine(data.X.shape[1],
                          degree=DEGREE,
                          depth=DEPTH,
                          lengthscale=LENGTHSCALE,
                          std_dev=1.0,
                          input_scaling=IS_ARD) for i in range(1)
    ]
else:
    kern = [
        kernels.RadialBasis(data.X.shape[1],
                            lengthscale=LENGTHSCALE,
                            input_scaling=IS_ARD) for i in range(1)
    ]

print("Using Kernel " + KERNEL)

m = autogp.GaussianProcess(likelihood,
                           kern,
                           Z,
                           num_samples=NUM_SAMPLES,
                           num_components=NUM_COMPONENTS)
error_rate = losses.ZeroOneLoss(data.Dout)
o = tf.train.AdamOptimizer(LEARNING_RATE)
m.fit(data,
      o,
      loo_steps=LOOCV_STEPS,
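The excerpt ends inside the fit call. As a rough sketch of what an evaluation step after training could look like, assuming the model exposes a predict method that returns predictive means and variances and that test carries X and Y arrays (none of this is shown in the excerpt):

import numpy as np

# Hedged sketch only: m.predict, test.X and test.Y are assumptions about the API.
pred_means, pred_vars = m.predict(test.X)
predicted_labels = (pred_means > 0.5).astype(test.Y.dtype)
print("Test error rate:", np.mean(predicted_labels != test.Y))

Since the likelihood here is likelihoods.Logistic(), the predictive mean is a class probability, so thresholding at 0.5 gives hard labels for the zero-one error.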