示例#1
0
    def set_up(self):
        """Load the glass dataset, whiten it, and build the GP posterior target.

        Mirrors the preprocessing in the kameleon-mcmc code base: centre the
        features, whiten them with the Cholesky factor of the sample
        covariance, then wrap them in a pseudo-marginal hyperparameter target.
        Stores the result in ``self.gp_posterior``.
        """
        # load data using kameleon-mcmc code
        logger.info("Loading data")
        features, labels = GPData.get_glass_data()

        # normalise and whiten dataset, as done in kameleon-mcmc code
        logger.info("Whitening data")
        features = features - np.mean(features, 0)
        chol = np.linalg.cholesky(np.cov(features.T))
        features = sp.linalg.solve_triangular(chol, features.T, lower=True).T

        # build target, as in kameleon-mcmc code
        self.gp_posterior = PseudoMarginalHyperparameters(
            features,
            labels,
            self.n_importance,
            self.prior,
            self.ridge,
            num_shogun_threads=1,
        )
示例#2
0
    # Command-line interface: expects exactly two arguments,
    # <experiment_dir_base> and <number_of_experiments>; otherwise print
    # usage (Python 2 print statements) and quit.
    if len(sys.argv) != 3:
        print "usage:", str(sys.argv[0]).split(os.sep)[-1], "<experiment_dir_base> <number_of_experiments>"
        print "example:"
        print "python " + str(sys.argv[0]).split(os.sep)[-1] + " /nfs/nhome/live/ucabhst/kameleon_experiments/ 3"
        exit()

    experiment_dir_base = str(sys.argv[1])
    # number of times the experiment is repeated
    n = int(str(sys.argv[2]))

    # loop over parameters here

    # output directory: <base>/<this script's name without extension>/
    experiment_dir = experiment_dir_base + str(os.path.abspath(sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep
    print "running experiments", n, "times at base", experiment_dir

    # load data
    data,labels=GPData.get_glass_data()

    # normalise and whiten dataset
    data-=mean(data, 0)
    # whiten: with L the Cholesky factor of the sample covariance, solve
    # L z = x for each centred sample so the result has identity covariance
    L=cholesky(cov(data.T))
    data=solve_triangular(L, data.T, lower=True).T
    dim=shape(data)[1]

    # prior on theta and posterior target estimate
    # isotropic Gaussian prior (variance 5) over the dim hyperparameters
    theta_prior=Gaussian(mu=0*ones(dim), Sigma=eye(dim)*5)
    # pseudo-marginal target: likelihood estimated with 100 importance samples
    distribution=PseudoMarginalHyperparameterDistribution(data, labels, \
                                                    n_importance=100, prior=theta_prior, \
                                                    ridge=1e-3)

    # presumably a kernel bandwidth for the sampler (used beyond this
    # fragment) -- TODO confirm against the continuation of this script
    sigma = 23.0
    print "using sigma", sigma
  # Command-line interface: expects <experiment_dir_base> and
  # <number_of_experiments>; otherwise print usage and quit.
  if len(sys.argv) != 3:
      print "usage:", str(sys.argv[0]).split(os.sep)[-1], "<experiment_dir_base> <number_of_experiments>"
      print "example:"
      print "python " + str(sys.argv[0]).split(os.sep)[-1] + " /nfs/nhome/live/ucabhst/kameleon_experiments/ 3"
      exit()

  experiment_dir_base = str(sys.argv[1])
  # number of experiment repetitions (NOTE(review): shadowed by n=500 below)
  n = int(str(sys.argv[2]))

  # loop over parameters here

  # output directory: <base>/<this script's name without extension>/
  experiment_dir = experiment_dir_base + str(os.path.abspath(sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep
  print "running experiments", n, "times at base", experiment_dir

  # load data
  data,labels=GPData.get_mushroom_data()

  # throw away some data
  # NOTE(review): no seed() call here, so the subsample differs per run; this
  # assignment also overwrites the CLI repetition count n -- confirm intended
  n=500
  idx=permutation(len(data))
  idx=idx[:n]
  data=data[idx]
  labels=labels[idx]

  dim=shape(data)[1]

  # prior on theta and posterior target estimate
  # isotropic Gaussian prior (variance 5) over the dim hyperparameters
  theta_prior=Gaussian(mu=0*ones(dim), Sigma=eye(dim)*5)
  # pseudo-marginal target for the diffusion-kernel variant
  distribution=PseudoMarginalHyperparameterDistributionDiffusion(data, labels, \
                                                  n_importance=100, prior=theta_prior, \
                                                  ridge=1e-3)
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from numpy.lib.twodim_base import eye
from numpy.linalg.linalg import cholesky
from numpy.ma.core import mean, ones, shape, asarray, zeros
from numpy.ma.extras import cov
from numpy.random import permutation, seed
from scipy.linalg.basic import solve_triangular
import os
import sys

if __name__ == '__main__':
    # load data
    data, labels = GPData.get_glass_data()

    # throw away some data
    # fixed seed so the random n=250 subsample is reproducible
    n = 250
    seed(1)
    idx = permutation(len(data))
    idx = idx[:n]
    data = data[idx]
    labels = labels[idx]

    # normalise and whiten dataset
    # centre, then whiten via the Cholesky factor of the sample covariance
    data -= mean(data, 0)
    L = cholesky(cov(data.T))
    data = solve_triangular(L, data.T, lower=True).T
    dim = shape(data)[1]
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from numpy.lib.twodim_base import eye
from numpy.linalg.linalg import cholesky
from numpy.ma.core import mean, ones, shape, asarray, std, zeros
from numpy.ma.extras import cov
from numpy.random import permutation, seed
from scipy.linalg.basic import solve_triangular
import os
import sys
    
if __name__ == '__main__':
    # load data
    data,labels=GPData.get_madelon_data()

    # throw away some data
    # fixed seed so the random n=750 subsample is reproducible
    n=750
    seed(1)
    idx=permutation(len(data))
    idx=idx[:n]
    data=data[idx]
    labels=labels[idx]

    # normalise dataset
    # z-score each feature (no whitening here, unlike the other experiments)
    data-=mean(data, 0)
    data/=std(data,0)
    dim=shape(data)[1]

    # prior on theta and posterior target estimate
示例#6
0
from kameleon_mcmc.mcmc.output.PlottingOutput import PlottingOutput
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from matplotlib.pyplot import plot
from numpy.lib.twodim_base import eye
from numpy.ma.core import mean, std, ones, shape
from numpy.ma.extras import vstack, hstack
import os
import sys

if __name__ == '__main__':
    # sample data
    # synthetic 2D toy set: 40 points from a circle, 60 from a rectangle,
    # with a fixed seed for reproducibility
    data_circle, labels_circle = GPData.sample_circle_data(n=40, seed_init=1)
    data_rect, labels_rect = GPData.sample_rectangle_data(n=60, seed_init=1)

    # combine
    data = vstack((data_circle, data_rect))
    labels = hstack((labels_circle, labels_rect))
    dim = shape(data)[1]

    # normalise data
    # z-score each feature
    data -= mean(data, 0)
    data /= std(data, 0)

    # plot
    # positive class in red; idx_b (negative class) is used beyond this fragment
    idx_a = labels > 0
    idx_b = labels < 0
    plot(data[idx_a, 0], data[idx_a, 1], "ro")
示例#7
0
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from numpy.lib.twodim_base import eye
from numpy.linalg.linalg import cholesky
from numpy.ma.core import mean, ones, shape, asarray
from numpy.ma.extras import cov
from numpy.random import permutation, seed
from scipy.linalg.basic import solve_triangular
import os
import sys

if __name__ == '__main__':
    # load data
    data, labels = GPData.get_pima_data()

    # throw away some data
    # fixed seed so the random n=250 subsample is reproducible
    n = 250
    seed(1)
    idx = permutation(len(data))
    idx = idx[:n]
    data = data[idx]
    labels = labels[idx]

    # normalise and whiten dataset
    # centre, then whiten via the Cholesky factor of the sample covariance
    data -= mean(data, 0)
    L = cholesky(cov(data.T))
    data = solve_triangular(L, data.T, lower=True).T
    dim = shape(data)[1]
示例#8
0
        # (fragment starts inside the CLI usage check whose header is not
        # visible here: prints an example invocation and quits)
        print "example:"
        print "python " + str(sys.argv[0]).split(
            os.sep)[-1] + " /nfs/nhome/live/ucabhst/kameleon_experiments/ 3"
        exit()

    experiment_dir_base = str(sys.argv[1])
    # number of experiment repetitions (NOTE(review): shadowed by n = 500 below)
    n = int(str(sys.argv[2]))

    # loop over parameters here

    # output directory: <base>/<this script's name without extension>/
    experiment_dir = experiment_dir_base + str(os.path.abspath(
        sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep
    print "running experiments", n, "times at base", experiment_dir

    # load data
    data, labels = GPData.get_mushroom_data()

    # throw away some data
    # NOTE(review): no seed() call here, so the subsample differs per run;
    # this assignment also overwrites the CLI repetition count n -- confirm
    n = 500
    idx = permutation(len(data))
    idx = idx[:n]
    data = data[idx]
    labels = labels[idx]

    dim = shape(data)[1]

    # prior on theta and posterior target estimate
    # isotropic Gaussian prior (variance 5) over the dim hyperparameters
    theta_prior = Gaussian(mu=0 * ones(dim), Sigma=eye(dim) * 5)
    # pseudo-marginal target for the diffusion-kernel variant
    distribution=PseudoMarginalHyperparameterDistributionDiffusion(data, labels, \
                                                    n_importance=100, prior=theta_prior, \
                                                    ridge=1e-3)
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from numpy.lib.twodim_base import eye
from numpy.linalg.linalg import cholesky
from numpy.ma.core import mean, ones, shape, asarray
from numpy.ma.extras import cov
from numpy.random import permutation, seed
from scipy.linalg.basic import solve_triangular
import os
import sys
    
if __name__ == '__main__':
    # load data
    data,labels=GPData.get_pima_data()

    # throw away some data
    # fixed seed so the random n=250 subsample is reproducible
    n=250
    seed(1)
    idx=permutation(len(data))
    idx=idx[:n]
    data=data[idx]
    labels=labels[idx]

    # normalise and whiten dataset
    # centre, then whiten via the Cholesky factor of the sample covariance
    data-=mean(data, 0)
    L=cholesky(cov(data.T))
    data=solve_triangular(L, data.T, lower=True).T
    dim=shape(data)[1]
from kameleon_mcmc.mcmc.output.PlottingOutput import PlottingOutput
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from matplotlib.pyplot import plot
from numpy.lib.twodim_base import eye
from numpy.ma.core import mean, std, ones, shape
from numpy.ma.extras import vstack, hstack
import os
import sys
    
if __name__ == '__main__':
    # sample data
    # synthetic 2D toy set: 40 points from a circle, 60 from a rectangle,
    # with a fixed seed for reproducibility
    data_circle, labels_circle=GPData.sample_circle_data(n=40, seed_init=1)
    data_rect, labels_rect=GPData.sample_rectangle_data(n=60, seed_init=1)

    # combine
    data=vstack((data_circle, data_rect))
    labels=hstack((labels_circle, labels_rect))
    dim=shape(data)[1]

    # normalise data
    # z-score each feature
    data-=mean(data, 0)
    data/=std(data,0)

    # plot
    # positive class in red; idx_b (negative class) is used beyond this fragment
    idx_a=labels>0
    idx_b=labels<0
    plot(data[idx_a,0], data[idx_a,1],"ro")