Example #1
def mnist8m_experiment(method,
                       components,
                       sparsity_factor,
                       run_id,
                       image=None,
                       n_threads=1,
                       partition_size=3000,
                       optimize_stochastic=True,
                       num_samples=1000,
                       max_iter=8000):
    """
    Run the mnist8m experiment.

    Parameters
    ----------
    method : str
        The method under which to run the experiment (mix1, mix2, or full).
    components : int
        Number of mixture components in the variational posterior.
    sparsity_factor : float
        Proportion of training points used as inducing points.
    run_id : int
        Id of the configuration; selects the data split (1-based).
    """
    name = 'mnist8m'
    data = data_source.mnist8m_data()[run_id - 1]
    kernel = [
        ExtRBF(data['train_inputs'].shape[1],
               variance=11,
               lengthscale=np.array((9., )),
               ARD=False) for _ in range(10)
    ]
    cond_ll = likelihood.SoftmaxLL(10)
    transform = data_transformation.IdentityTransformation(
        data['train_inputs'], data['train_outputs'])

    return run_model.run_model(data['train_inputs'],
                               data['train_outputs'],
                               data['test_inputs'],
                               data['test_outputs'],
                               cond_ll,
                               kernel,
                               method,
                               components,
                               name,
                               data['id'],
                               sparsity_factor,
                               transform,
                               False,
                               False,
                               optimization_config={
                                   'mog': 60,
                                   'hyp': 15
                               },
                               num_samples=num_samples,
                               max_iter=max_iter,
                               n_threads=n_threads,
                               ftol=10,
                               model_image_dir=image,
                               partition_size=partition_size,
                               optimize_stochastic=optimize_stochastic)
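A minimal invocation sketch, assuming numpy and the repository's local modules (data_source, likelihood, data_transformation, run_model, ExtRBF, get_kernels) are importable; the argument values below are illustrative, not taken from the source:

# Hypothetical call: a 2-component mixture posterior ('mix2') on the first
# mnist8m split, with 4% of the training points used as inducing points.
result = mnist8m_experiment(method='mix2',
                            components=2,
                            sparsity_factor=0.04,
                            run_id=1,
                            n_threads=4,
                            partition_size=2000,
                            optimize_stochastic=True)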
Example #2
def mnist_binary_inducing_experiment(method,
                                     sparsity_factor,
                                     run_id,
                                     image=None,
                                     n_threads=1,
                                     partition_size=3000,
                                     optimize_stochastic=False):
    """
    Run the binary mnist experiment with inducing point learning.

    Parameters
    ----------
    method : str
        The method under which to run the experiment (mix1, mix2, or full).
    sparsity_factor : float
        Proportion of training points used as inducing points.
    run_id : int
        Id of the configuration; selects the data split (1-based).
    """
    name = 'mnist_binary'
    data = data_source.mnist_binary_data()[run_id - 1]
    kernel = [
        ExtRBF(data['train_inputs'].shape[1],
               variance=11,
               lengthscale=np.array((9., )),
               ARD=False)
    ]
    cond_ll = likelihood.LogisticLL()
    transform = data_transformation.IdentityTransformation(
        data['train_inputs'], data['train_outputs'])

    return run_model.run_model(data['train_inputs'],
                               data['train_outputs'],
                               data['test_inputs'],
                               data['test_outputs'],
                               cond_ll,
                               kernel,
                               method,
                               name,
                               data['id'],
                               sparsity_factor,
                               transform,
                               False,
                               False,
                               optimization_config={
                                   'mog': 60,
                                   'hyp': 15,
                                   'inducing': 6
                               },
                               max_iter=9,
                               n_threads=n_threads,
                               ftol=10,
                               model_image_dir=image,
                               partition_size=partition_size,
                               optimize_stochastic=optimize_stochastic)
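The same pattern applies when inducing-point locations are also learned; a hypothetical call with illustrative values:

# Hypothetical call: full Gaussian posterior on the first binary mnist split;
# the 'inducing' entry in optimization_config controls inducing-point updates.
result = mnist_binary_inducing_experiment(method='full',
                                          sparsity_factor=0.04,
                                          run_id=1,
                                          n_threads=2)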
Example #3
def usps_experiment(method,
                    components,
                    sparsity_factor,
                    run_id,
                    optimize_stochastic=False):
    """
    Run the usps experiment.

    Parameters
    ----------
    method : str
        The method under which to run the experiment (mix1, mix2, or full).
    components : int
        Number of mixture components in the variational posterior.
    sparsity_factor : float
        Proportion of training points used as inducing points.
    run_id : int
        Id of the configuration; selects the data split (1-based).
    """
    name = 'usps'
    data = data_source.usps_data()[run_id - 1]
    kernel = [
        ExtRBF(data['train_inputs'].shape[1],
               variance=2,
               lengthscale=np.array((4., )),
               ARD=False) for _ in range(3)
    ]
    cond_ll = likelihood.SoftmaxLL(3)
    transform = data_transformation.IdentityTransformation(
        data['train_inputs'], data['train_outputs'])

    return run_model.run_model(data['train_inputs'],
                               data['train_outputs'],
                               data['test_inputs'],
                               data['test_outputs'],
                               cond_ll,
                               kernel,
                               method,
                               components,
                               name,
                               data['id'],
                               sparsity_factor,
                               transform,
                               True,
                               False,
                               optimization_config={
                                   'mog': 25,
                                   'hyp': 25
                               },
                               max_iter=300)
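A hypothetical call for the 3-class usps setup, with illustrative values:

# Hypothetical call: single-component posterior ('mix1') on usps split 2,
# with 10% of the training points used as inducing points.
result = usps_experiment(method='mix1',
                         components=1,
                         sparsity_factor=0.1,
                         run_id=2)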
Example #4
def wisconsin_experiment(method,
                         components,
                         sparsity_factor,
                         run_id,
                         optimize_stochastic=False):
    """
    Run the wisconsin experiment.

    Parameters
    ----------
    method : str
        The method under which to run the experiment (mix1, mix2, or full).
    components : int
        Number of mixture components in the variational posterior.
    sparsity_factor : float
        Proportion of training points used as inducing points.
    run_id : int
        Id of the configuration; selects the data split (1-based).
    """
    name = 'breast_cancer'
    data = data_source.wisconsin_breast_cancer_data()[run_id - 1]
    kernel = get_kernels(data['train_inputs'].shape[1], 1, False)
    cond_ll = likelihood.LogisticLL()
    transform = data_transformation.IdentityTransformation(
        data['train_inputs'], data['train_outputs'])

    return run_model.run_model(data['train_inputs'],
                               data['train_outputs'],
                               data['test_inputs'],
                               data['test_outputs'],
                               cond_ll,
                               kernel,
                               method,
                               components,
                               name,
                               data['id'],
                               sparsity_factor,
                               transform,
                               True,
                               False,
                               optimization_config={
                                   'mog': 25,
                                   'hyp': 25
                               },
                               max_iter=200)
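A hypothetical call for the binary Wisconsin setup, with illustrative values:

# Hypothetical call: full posterior on the first breast-cancer split,
# using half of the training points as inducing points.
result = wisconsin_experiment(method='full',
                              components=1,
                              sparsity_factor=0.5,
                              run_id=1)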
Example #5
def mining_experiment(method,
                      components,
                      sparsity_factor,
                      run_id,
                      optimize_stochastic=False):
    """
    Run the mining experiment.

    Parameters
    ----------
    method : str
        The method under which to run the experiment (mix1, mix2, or full).
    components : int
        Number of mixture components in the variational posterior.
    sparsity_factor : float
        Proportion of training points used as inducing points.
    run_id : int
        Id of the configuration; selects the data split (1-based).
    """
    name = 'mining'
    data = data_source.mining_data()[run_id - 1]
    kernel = get_kernels(data['train_inputs'].shape[1], 1, False)
    cond_ll = likelihood.LogGaussianCox(np.log(191. / 811))
    transform = data_transformation.IdentityTransformation(
        data['train_inputs'], data['train_outputs'])
    kernel[0].variance = 1.0
    kernel[0].lengthscale = 13516.0

    return run_model.run_model(data['train_inputs'],
                               data['train_outputs'],
                               data['test_inputs'],
                               data['test_outputs'],
                               cond_ll,
                               kernel,
                               method,
                               components,
                               name,
                               data['id'],
                               sparsity_factor,
                               transform,
                               True,
                               True,
                               optimization_config={'mog': 15000},
                               max_iter=1)
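A hypothetical call for the log Gaussian Cox mining setup, with illustrative values:

# Hypothetical call: the experiment above fixes the kernel hyperparameters and
# only optimizes the posterior ('mog'), so a single outer iteration is used.
result = mining_experiment(method='mix1',
                           components=1,
                           sparsity_factor=1.0,
                           run_id=1)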
Example #6
def seismic_experiment(method,
                       components,
                       sparsity_factor,
                       run_id,
                       image=None,
                       n_threads=1,
                       partition_size=3000,
                       optimize_stochastic=False):
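    """
    Run the seismic experiment.

    Parameters
    ----------
    method : str
        The method under which to run the experiment (mix1, mix2, or full).
    components : int
        Number of mixture components in the variational posterior.
    sparsity_factor : float
        Proportion of training points used as inducing points.
    run_id : int
        Id of the configuration (unused here; the single seismic dataset is always loaded).
    """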
    name = 'seismic'
    data = data_source.seismic_data()[0]

    # prior_var = np.array([900, 5625, 57600, 108900, 38025, 52900, 75625, 133225])
    # prior_mu = [200, 500, 1600, 2200, 1950, 2300, 2750, 3650]
    # sigma2y = [0.0006, 0.0025, 0.0056, 0.0100]

    scale_factor = 10.  # for numerical reasons (final predictions to be post-processed)
    mean_depth = np.array([200.0, 500.0, 1600.0, 2200.0], dtype=np.double)
    mean_vel = np.array([1950.0, 2300.0, 2750.0, 3650.0], dtype=np.double)
    std_depth = mean_depth * 0.15
    std_vel = mean_vel * 0.10
    prior_mu = np.hstack((mean_depth, mean_vel)) / scale_factor
    prior_var = np.square(np.hstack(
        (std_depth, std_vel))) / (scale_factor * scale_factor)
    sigma2y = np.square([0.025, 0.05, 0.075, 0.1])

    input_dim = data['train_inputs'].shape[1]

    kernel = [
        ExtRBF(input_dim,
               variance=prior_var[i],
               lengthscale=np.array((1, )),
               ARD=True) for i in range(len(prior_var))
    ]

    cond_ll = likelihood.SeismicLL(4, sigma2y)

    transform = data_transformation.IdentityTransformation(
        data['train_inputs'], data['train_outputs'])
    #transform = data_transformation.MeanStdXTransformation(data['train_inputs'], data['train_outputs'])

    return run_model.run_model(
        data['train_inputs'],
        data['train_outputs'],
        data['test_inputs'],
        data['test_outputs'],
        cond_ll,
        kernel,
        method,
        components,
        name,
        data['id'],
        sparsity_factor,
        transform,
        False,
        True,
        optimization_config={'mog': 100},
        # max_iter=10,
        partition_size=partition_size,
        # ftol=1,
        n_threads=n_threads,
        model_image_dir=image,
        GP_mean=prior_mu,
        init_var=0.001 * prior_var,
        num_samples=100000,
    )
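A hypothetical call for the seismic setup, with illustrative values; image presumably points to a directory of saved model state when resuming:

# Hypothetical call: 2-component posterior with 10% inducing points,
# run across 4 threads; image is left as None (no saved model to resume from).
result = seismic_experiment(method='mix2',
                            components=2,
                            sparsity_factor=0.1,
                            run_id=1,
                            n_threads=4)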