Example #1
    def test_mode_newton_2d(self):
        X = asarray([-1, 1])
        X = reshape(X, (len(X), 1))
        y = asarray([+1 if x >= 0 else -1 for x in X])
        covariance = SquaredExponentialCovariance(sigma=1, scale=1)
        likelihood = LogitLikelihood()
        gp = GaussianProcess(y, X, covariance, likelihood)
        laplace = LaplaceApproximation(gp, newton_start=asarray([3, 3]))
        
        f_mode, _, steps = laplace.find_mode_newton(return_full=True)
        F = linspace(-10, 10, 20)
        D = zeros((len(F), len(F)))
        for i in range(len(F)):
            for j in range(len(F)):
                f = asarray([F[i], F[j]])
                D[i, j] = gp.log_posterior_unnormalised(f)
           
        idx = unravel_index(D.argmax(), D.shape)
        empirical_max = asarray([F[idx[0]], F[idx[1]]])
        
        pcolor(F, F, D)
        hold(True)
        plot(steps[:, 0], steps[:, 1])
        plot(f_mode[1], f_mode[0], 'mo', markersize=10)
        hold(False)
        colorbar()
        clf()
#        show()
           
        self.assertLessEqual(norm(empirical_max - f_mode), 1)
Example #2
    def test_get_gaussian_2d(self):
        X = asarray([-1, 1])
        X = reshape(X, (len(X), 1))
        y = asarray([+1 if x >= 0 else -1 for x in X])
        covariance = SquaredExponentialCovariance(sigma=1, scale=1)
        likelihood = LogitLikelihood()
        gp = GaussianProcess(y, X, covariance, likelihood)
        laplace = LaplaceApproximation(gp, newton_start=asarray([3, 3]))
        
        f_mode, L, steps = laplace.find_mode_newton(return_full=True)
        gaussian = laplace.get_gaussian(f_mode, L)
        F = linspace(-10, 10, 20)
        D = zeros((len(F), len(F)))
        Q = array(D, copy=True)
        for i in range(len(F)):
            for j in range(len(F)):
                f = asarray([F[i], F[j]])
                D[i, j] = gp.log_posterior_unnormalised(f)
                Q[i, j] = gaussian.log_pdf(f.reshape(1, len(f)))
        
        subplot(1, 2, 1)
        pcolor(F, F, D)
        hold(True)
        plot(steps[:, 0], steps[:, 1])
        plot(f_mode[1], f_mode[0], 'mo', markersize=10)
        hold(False)
        colorbar()
        subplot(1, 2, 2)
        pcolor(F, F, Q)
        hold(True)
        plot(f_mode[1], f_mode[0], 'mo', markersize=10)
        hold(False)
        colorbar()
#        show()
        clf()
Example #3
    def get_mushroom_data():
        data_dir = os.sep.join(__file__.split(os.sep)[:-3] + ["data"])
        filename = data_dir + os.sep + "agaricus-lepiota.data"
        f = open(filename)
        X = asarray(
            ["".join(x.strip(os.linesep).split(",")) for x in f.readlines()],
            dtype="c")
        f.close()

        # first column is labels
        labels = asarray(
            [+1. if X[i, 0] == "e" else -1. for i in range(len(X))])

        # remove attribute eleven which contains loads of missing values
        remove_idx = 11
        indices = hstack((arange(remove_idx), arange(remove_idx + 1,
                                                     X.shape[1])))
        X = X[:, indices[1:]]

        # generate a map of categorical to int
        cat2num = {}
        U = unique(X)
        for num in range(len(U)):
            cat2num[U[num]] = num

        # produce an integer representation of the categorical data
        X_int = asarray([[cat2num[X[i, j]] for i in range(X.shape[0])]
                         for j in range(X.shape[1])]).T

        return array(X_int, dtype=float64), labels
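A typical call, assuming the UCI mushroom file agaricus-lepiota.data is present in the project's data directory (the shapes described in the comment follow from the code above, not from separate documentation):

    X, labels = get_mushroom_data()
    # X: float64 matrix of integer-coded categorical attributes (label column
    # and attribute eleven removed); labels: +1 for edible, -1 for poisonous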
Example #4
 def get_mushroom_data():
     data_dir = os.sep.join(__file__.split(os.sep)[:-3] + ["data"])
     filename = data_dir + os.sep + "agaricus-lepiota.data"
     f = open(filename)
     X = asarray(["".join(x.strip(os.linesep).split(",")) for x in f.readlines()], dtype="c")
     f.close()
     
     # first column is labels
     labels = asarray([+1. if X[i, 0] == "e" else -1. for i in range(len(X))])
     
     # remove attribute eleven which contains loads of missing values
     remove_idx = 11
     indices = hstack((arange(remove_idx), arange(remove_idx + 1, X.shape[1])))
     X = X[:, indices[1:]]
     
     # generate a map of categorical to int
     cat2num = {}
     U = unique(X)
     for num in range(len(U)):
         cat2num[U[num]] = num
     
     # produce an integer representation of the categorical data
     X_int = asarray([[cat2num[X[i, j]] for i in range(X.shape[0])] for j in range(X.shape[1])]).T
     
     return array(X_int, dtype=float64), labels
Example #5
    def test_predict(self):
        # define some easy training data and predict predictive distribution
        circle1 = Ring(variance=1, radius=3)
        circle2 = Ring(variance=1, radius=10)
        
        n = 100
        X = circle1.sample(n // 2).samples
        X = vstack((X, circle2.sample(n // 2).samples))
        y = ones(n)
        y[:n // 2] = -1.0
        
#        plot(X[:n/2,0], X[:n/2,1], 'ro')
#        hold(True)
#        plot(X[n/2:,0], X[n/2:,1], 'bo')
#        hold(False)
#        show()

        covariance = SquaredExponentialCovariance(1, 1)
        likelihood = LogitLikelihood()
        gp = GaussianProcess(y, X, covariance, likelihood)

        # predict on mesh
        n_test = 20
        P = linspace(X[:, 0].min() - 1, X[:, 0].max() + 1, n_test)
        Q = linspace(X[:, 1].min() - 1, X[:, 1].max() + 1, n_test)
        X_test = asarray(list(itertools.product(P, Q)))
#        Y_test = exp(LaplaceApproximation(gp).predict(X_test).reshape(n_test, n_test))
        Y_train = exp(LaplaceApproximation(gp).predict(X))
        print(Y_train)
        
        print(Y_train > 0.5)
        print(y)
Example #6
def main():
    Log.set_loglevel(logging.DEBUG)

    prior = Gaussian(Sigma=eye(2) * 100)

    posterior = OzonePosterior(prior,
                               logdet_alg="scikits",
                               solve_method="scikits")

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=5000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_average_serial"])
    store_chain_output = StoreChainOutput(folder)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #7
def main():
    # define the MCMC target distribution
    # possible distributions are in kameleon_mcmc.distribution: Banana, Flower, Ring
#    distribution = Banana(dimension=2, bananicity=0.03, V=100.0)
    distribution = Ring()
    
    # create instance of kameleon sampler that learns the length scale
    # can be replaced by any other sampler in kameleon_mcmc.mcmc.samplers
    kernel = GaussianKernel(sigma=5)
    mcmc_sampler = KameleonWindowLearnScale(distribution, kernel, stop_adapt=inf, nu2=0.05)
    
    # mcmc chain and its parameters
    start = asarray([0,-3])
    mcmc_params = MCMCParams(start=start, num_iterations=30000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)
    
    # plot every iteration and print some statistics
    chain.append_mcmc_output(PlottingOutput(distribution, plot_from=2000))
    chain.append_mcmc_output(StatisticsOutput())
    
    # run mcmc
    chain.run()
    
    # print empirical quantiles
    burnin = 10000
    print(distribution.emp_quantiles(chain.samples[burnin:]))
    
    Visualise.visualise_distribution(distribution, chain.samples)
Example #8
def main():
    Log.set_loglevel(logging.DEBUG)
    
    prior = Gaussian(Sigma=eye(2) * 100)
    
    posterior = OzonePosterior(prior, logdet_alg="scikits",
                               solve_method="scikits")
    
    proposal_cov = diag([ 4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)
    
    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=5000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)
    
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))
    
    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_average_serial"])
    store_chain_output = StoreChainOutput(folder)
    chain.append_mcmc_output(store_chain_output)
    
    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded
        
    chain.run()
    
    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #9
 def test_subclasspreservation(self):
     # Checks that masked_array(...,subok=True) preserves the class.
     x = np.arange(5)
     m = [0, 0, 1, 0, 0]
     xinfo = [(i, j) for (i, j) in zip(x, m)]
     xsub = MSubArray(x, mask=m, info={'xsub': xinfo})
     #
     mxsub = masked_array(xsub, subok=False)
     assert_(not isinstance(mxsub, MSubArray))
     assert_(isinstance(mxsub, MaskedArray))
     assert_equal(mxsub._mask, m)
     #
     mxsub = asarray(xsub)
     assert_(not isinstance(mxsub, MSubArray))
     assert_(isinstance(mxsub, MaskedArray))
     assert_equal(mxsub._mask, m)
     #
     mxsub = masked_array(xsub, subok=True)
     assert_(isinstance(mxsub, MSubArray))
     assert_equal(mxsub.info, xsub.info)
     assert_equal(mxsub._mask, xsub._mask)
     #
     mxsub = asanyarray(xsub)
     assert_(isinstance(mxsub, MSubArray))
     assert_equal(mxsub.info, xsub.info)
     assert_equal(mxsub._mask, m)
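MSubArray is a masked-array subclass from numpy.ma's test suite. The behaviour being exercised above holds for any ndarray subclass, as this minimal illustration with a hypothetical subclass shows:

    # asarray drops subclass information; asanyarray preserves it
    import numpy as np
    
    class MyArray(np.ndarray):
        pass
    
    x = np.arange(5).view(MyArray)
    assert not isinstance(np.asarray(x), MyArray)
    assert isinstance(np.asanyarray(x), MyArray)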
Example #10
    def find_mode_newton(self, return_full=False):
        """
        Newton search for mode of p(y|f)p(f)
        
        from GP book, algorithm 3.1, added step size
        """
        K = self.gp.K

        if self.newton_start is None:
            f = zeros(len(K))
        else:
            f = self.newton_start

        if return_full:
            steps = [f]

        iteration = 0
        norm_difference = inf
        objective_value = -inf

        while iteration < self.newton_max_iterations and norm_difference > self.newton_epsilon:
            # from GP book, algorithm 3.1, added step size
            # scale log_lik_grad_vector and K^-1 f = a

            w = -self.gp.likelihood.log_lik_hessian_vector(self.gp.y, f)
            w_sqrt = sqrt(w)

            # diag(w_sqrt).dot(K.dot(diag(w_sqrt))) == (K.T*w_sqrt).T*w_sqrt
            L = cholesky(eye(len(K)) + (K.T * w_sqrt).T * w_sqrt)
            b = f * w + self.newton_step * self.gp.likelihood.log_lik_grad_vector(self.gp.y, f)

            # a=b-diag(w_sqrt).dot(inv(eye(len(K)) + (K.T*w_sqrt).T*w_sqrt).dot(diag(w_sqrt).dot(K.dot(b))))
            a = w_sqrt * (K.dot(b))
            a = solve_triangular(L, a, lower=True)
            a = solve_triangular(L.T, a, lower=False)
            a = w_sqrt * a
            a = b - a

            f_new = K.dot(self.newton_step * a)

            # convergence stuff and next iteration
            objective_value_new = -0.5 * a.T.dot(f) + sum(self.gp.likelihood.log_lik_vector(self.gp.y, f))
            norm_difference = norm(f - f_new)

            if objective_value_new > objective_value:
                f = f_new
                if return_full:
                    steps.append(f)
            else:
                self.newton_step /= 2

            iteration += 1
            objective_value = objective_value_new

        self.computed = True

        if return_full:
            return f, L, asarray(steps)
        else:
            return f
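For reference, the loop above is a damped variant of GPML Algorithm 3.1: the Cholesky factor of B = I + W^(1/2) K W^(1/2) is used so the update never inverts K directly. A minimal standalone sketch of one Newton step, with grad and hess as hypothetical stand-ins for the likelihood callbacks used above (a sketch under those assumptions, not the library's code):

    import numpy as np
    from numpy.linalg import cholesky
    from scipy.linalg import solve_triangular
    
    def newton_step(K, f, grad, hess, step=1.0):
        # W = -hessian of log p(y|f); a vector for factorising likelihoods
        w = -hess(f)
        w_sqrt = np.sqrt(w)
        # B = I + W^(1/2) K W^(1/2), built without forming diag(w_sqrt)
        B = np.eye(len(K)) + (K.T * w_sqrt).T * w_sqrt
        L = cholesky(B)
        b = w * f + step * grad(f)
        # a = b - W^(1/2) B^(-1) W^(1/2) K b via two triangular solves
        a = w_sqrt * K.dot(b)
        a = solve_triangular(L, a, lower=True)
        a = solve_triangular(L.T, a, lower=False)
        a = b - w_sqrt * a
        # damped update: f_new = K (step * a)
        return K.dot(step * a)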
Example #11
 def test_subclasspreservation(self):
     # Checks that masked_array(...,subok=True) preserves the class.
     x = np.arange(5)
     m = [0, 0, 1, 0, 0]
     xinfo = [(i, j) for (i, j) in zip(x, m)]
     xsub = MSubArray(x, mask=m, info={'xsub':xinfo})
     #
     mxsub = masked_array(xsub, subok=False)
     self.assertTrue(not isinstance(mxsub, MSubArray))
     self.assertTrue(isinstance(mxsub, MaskedArray))
     assert_equal(mxsub._mask, m)
     #
     mxsub = asarray(xsub)
     self.assertTrue(not isinstance(mxsub, MSubArray))
     self.assertTrue(isinstance(mxsub, MaskedArray))
     assert_equal(mxsub._mask, m)
     #
     mxsub = masked_array(xsub, subok=True)
     self.assertTrue(isinstance(mxsub, MSubArray))
     assert_equal(mxsub.info, xsub.info)
     assert_equal(mxsub._mask, xsub._mask)
     #
     mxsub = asanyarray(xsub)
     self.assertTrue(isinstance(mxsub, MSubArray))
     assert_equal(mxsub.info, xsub.info)
     assert_equal(mxsub._mask, m)
Example #12
def main():
    Log.set_loglevel(logging.DEBUG)

    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 1000

    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_rr_sge"])

    # cluster admin set project jump for me to exclusively allocate nodes
    parameter_prefix = ""  # #$ -P jump"

    cluster_parameters = BatchClusterParameters(
        foldername=folder,
        memory=7.8,
        loglevel=logging.DEBUG,
        parameter_prefix=parameter_prefix,
        max_walltime=60 * 60 * 24 - 1)

    computation_engine = SGEComputationEngine(cluster_parameters,
                                              check_interval=10)

    rr_instance = RussianRoulette(1e-3, block_size=400)

    posterior = OzonePosteriorRREngine(rr_instance=rr_instance,
                                       computation_engine=computation_engine,
                                       num_estimates=num_estimates,
                                       prior=prior)

    posterior.logdet_method = "shogun_estimate"

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.55, -10.1])
    mcmc_params = MCMCParams(start=start, num_iterations=5000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    #    chain.append_mcmc_output(PlottingOutput(None, plot_from=1, lag=1))
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    store_chain_output = StoreChainOutput(folder, lag=1)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #13
    def test_log_lik_vector_multiple2(self):
        n = 100
        y = randint(0, 2, n) * 2 - 1
        F = randn(10, n)

        lik = LogitLikelihood()
        multiples = lik.log_lik_vector_multiple(y, F)
        singles = asarray([lik.log_lik_vector(y, f) for f in F])

        self.assertLessEqual(norm(singles - multiples), 1e-10)
Example #14
 def test_log_lik_vector_multiple2(self):
     n = 100
     y = randint(0, 2, n) * 2 - 1
     F = randn(10, n)
     
     lik = LogitLikelihood()
     multiples = lik.log_lik_vector_multiple(y, F)
     singles = asarray([lik.log_lik_vector(y, f) for f in F])
     
     self.assertLessEqual(norm(singles - multiples), 1e-10)
Example #15
def main():
    Log.set_loglevel(logging.DEBUG)

    modulename = "sample_ozone_posterior_average_slurm"
    if not FileSystem.cmd_exists("sbatch"):
        engine = SerialComputationEngine()
    else:
        johns_slurm_hack = "#SBATCH --partition=intel-ivy,wrkstn,compute"
        johns_slurm_hack = "#SBATCH --partition=intel-ivy,compute"

        folder = os.sep + os.sep.join(["nfs", "data3", "ucabhst", modulename])
        batch_parameters = BatchClusterParameters(
            foldername=folder,
            max_walltime=24 * 60 * 60,
            resubmit_on_timeout=False,
            memory=3,
            parameter_prefix=johns_slurm_hack)
        engine = SlurmComputationEngine(batch_parameters,
                                        check_interval=1,
                                        do_clean_up=True)

    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 100

    posterior = OzonePosteriorAverageEngine(computation_engine=engine,
                                            num_estimates=num_estimates,
                                            prior=prior)
    posterior.logdet_method = "shogun_estimate"

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=2000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    home = expanduser("~")
    folder = os.sep.join([home, modulename])
    store_chain_output = StoreChainOutput(folder)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #16
def main():
    Log.set_loglevel(logging.DEBUG)
    
    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 1000
    
    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_rr_sge"])
    
    # cluster admin set project jump for me to exclusively allocate nodes
    parameter_prefix = ""  # "#$ -P jump"
    
    cluster_parameters = BatchClusterParameters(foldername=folder,
                                            memory=7.8,
                                            loglevel=logging.DEBUG,
                                            parameter_prefix=parameter_prefix,
                                            max_walltime=60 * 60 * 24 - 1)
        
    computation_engine = SGEComputationEngine(cluster_parameters, check_interval=10)
    
    rr_instance = RussianRoulette(1e-3, block_size=400)
    
    posterior = OzonePosteriorRREngine(rr_instance=rr_instance,
                                       computation_engine=computation_engine,
                                       num_estimates=num_estimates,
                                       prior=prior)
    
    posterior.logdet_method = "shogun_estimate"
    
    proposal_cov = diag([ 4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)
    
    start = asarray([-11.55, -10.1])
    mcmc_params = MCMCParams(start=start, num_iterations=5000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)
    
#    chain.append_mcmc_output(PlottingOutput(None, plot_from=1, lag=1))
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))
    
    store_chain_output = StoreChainOutput(folder, lag=1)
    chain.append_mcmc_output(store_chain_output)
    
    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded
        
    chain.run()
    
    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #17
 def __init__(self, mu=asarray([0, 0]), Sigma=eye(2), is_cholesky=False):
     DensityFunction.__init__(self, len(Sigma))
     
     assert(len(shape(mu)) == 1)
     assert(max(shape(Sigma)) == len(mu))
     self.mu = mu
     if is_cholesky: 
         self.L = Sigma
     else: 
         assert(shape(Sigma)[0] == shape(Sigma)[1])
         self.L = cholesky(Sigma)
Example #18
def main():
    Log.set_loglevel(logging.DEBUG)
    
    modulename = "sample_ozone_posterior_average_slurm"
    if not FileSystem.cmd_exists("sbatch"):
        engine = SerialComputationEngine()
    else:
        johns_slurm_hack = "#SBATCH --partition=intel-ivy,wrkstn,compute"
        johns_slurm_hack = "#SBATCH --partition=intel-ivy,compute"
        
        folder = os.sep + os.sep.join(["nfs", "data3", "ucabhst", modulename])
        batch_parameters = BatchClusterParameters(foldername=folder, max_walltime=24 * 60 * 60,
                                                  resubmit_on_timeout=False, memory=3,
                                                  parameter_prefix=johns_slurm_hack)
        engine = SlurmComputationEngine(batch_parameters, check_interval=1,
                                do_clean_up=True)
    
    
    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 100
    
    posterior = OzonePosteriorAverageEngine(computation_engine=engine,
                                        num_estimates=num_estimates,
                                        prior=prior)
    posterior.logdet_method = "shogun_estimate"
    
    proposal_cov = diag([ 4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)
    
    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=2000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)
    
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))
    
    home = expanduser("~")
    folder = os.sep.join([home, modulename])
    store_chain_output = StoreChainOutput(folder)
    chain.append_mcmc_output(store_chain_output)
    
    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded
        
    chain.run()
    
    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #19
    def test_log_lik_multiple2(self):
        n = 3
        y = randint(0, 2, n) * 2 - 1
        F = randn(10, n)

        X = randn(n, 2)
        covariance = SquaredExponentialCovariance(sigma=1, scale=1)
        likelihood = LogitLikelihood()
        gp = GaussianProcess(y, X, covariance, likelihood)

        singles = asarray([gp.log_likelihood(f) for f in F])
        multiples = gp.log_likelihood_multiple(F)

        self.assertLessEqual(norm(singles - multiples), 1e-10)
Example #20
 def test_log_mean_exp(self):
     X = asarray([-1, 1])
     X = reshape(X, (len(X), 1))
     y = asarray([+1. if x >= 0 else -1. for x in X])
     covariance = SquaredExponentialCovariance(sigma=1, scale=1)
     likelihood = LogitLikelihood()
     gp = GaussianProcess(y, X, covariance, likelihood)
     laplace = LaplaceApproximation(gp, newton_start=asarray([3, 3]))
     proposal = laplace.get_gaussian()
     
     n = 200
     prior = gp.get_gp_prior()
     samples = proposal.sample(n).samples
     
     log_likelihood = asarray([gp.log_likelihood(f) for f in samples])
     log_prior = prior.log_pdf(samples)
     log_proposal = proposal.log_pdf(samples)
     
     X = log_likelihood + log_prior - log_proposal
     
     a = log(mean(exp(X)))
     b = GPTools.log_mean_exp(X)
     
     self.assertLessEqual(a - b, 1e-5)
Example #21
    def test_log_mean_exp(self):
        X = asarray([-1, 1])
        X = reshape(X, (len(X), 1))
        y = asarray([+1. if x >= 0 else -1. for x in X])
        covariance = SquaredExponentialCovariance(sigma=1, scale=1)
        likelihood = LogitLikelihood()
        gp = GaussianProcess(y, X, covariance, likelihood)
        laplace = LaplaceApproximation(gp, newton_start=asarray([3, 3]))
        proposal = laplace.get_gaussian()

        n = 200
        prior = gp.get_gp_prior()
        samples = proposal.sample(n).samples

        log_likelihood = asarray([gp.log_likelihood(f) for f in samples])
        log_prior = prior.log_pdf(samples)
        log_proposal = proposal.log_pdf(samples)

        X = log_likelihood + log_prior - log_proposal

        a = log(mean(exp(X)))
        b = GPTools.log_mean_exp(X)

        self.assertLessEqual(a - b, 1e-5)
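The two tests above only call GPTools.log_mean_exp; its body is not shown on this page. A numerically stable version consistent with what the tests assert (the log of the empirical mean of exp(X)) could look like this sketch, which is an assumption rather than the library's actual code:

    from numpy import asarray, exp, log
    
    def log_sum_exp(X):
        # shift by the max so the largest exponent is exp(0) = 1, avoiding overflow
        X = asarray(X)
        m = X.max()
        return m + log(exp(X - m).sum())
    
    def log_mean_exp(X):
        # log of a mean = log-sum-exp minus log of the number of terms
        X = asarray(X)
        return log_sum_exp(X) - log(len(X))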
Example #22
def main():
    Log.set_loglevel(logging.DEBUG)

    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 2

    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_rr_sge"])

    computation_engine = SerialComputationEngine()

    rr_instance = RussianRoulette(1e-3, block_size=10)

    posterior = OzonePosteriorRREngine(rr_instance=rr_instance,
                                       computation_engine=computation_engine,
                                       num_estimates=num_estimates,
                                       prior=prior)

    posterior.logdet_method = "shogun_estimate"

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=200)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    #    chain.append_mcmc_output(PlottingOutput(None, plot_from=1, lag=1))
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    store_chain_output = StoreChainOutput(folder, lag=50)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #23
def main():
    Log.set_loglevel(logging.DEBUG)

    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 2

    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_rr_sge"])

    computation_engine = SerialComputationEngine()

    rr_instance = RussianRoulette(1e-3, block_size=10)

    posterior = OzonePosteriorRREngine(
        rr_instance=rr_instance, computation_engine=computation_engine, num_estimates=num_estimates, prior=prior
    )

    posterior.logdet_method = "shogun_estimate"

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=200)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    #    chain.append_mcmc_output(PlottingOutput(None, plot_from=1, lag=1))
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    store_chain_output = StoreChainOutput(folder, lag=50)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #24
 def sample_circle_data(n, noise_level=0.025, offset=0.05, seed_init=None):
     if seed_init is not None:
         seed(seed_init)
     
     # decision surface for sampled data
     thetas = linspace(0, pi / 2, n)
     X = sin(thetas) * (1 + randn(n) * noise_level)
     Y = cos(thetas) * (1 + randn(n) * noise_level)
     
     # randomly select labels and distinguish data
     labels = randint(0, 2, n) * 2 - 1
     idx_a = labels > 0
     idx_b = labels < 0
     X[idx_a] *= (1. + offset)
     Y[idx_a] *= (1. + offset)
     X[idx_b] *= (1. - offset)
     Y[idx_b] *= (1. - offset)
     
     return asarray(list(zip(X, Y))), labels
Example #25
    def sample_circle_data(n, noise_level=0.025, offset=0.05, seed_init=None):
        if seed_init is not None:
            seed(seed_init)

        # decision surface for sampled data
        thetas = linspace(0, pi / 2, n)
        X = sin(thetas) * (1 + randn(n) * noise_level)
        Y = cos(thetas) * (1 + randn(n) * noise_level)

        # randomly select labels and distinguish data
        labels = randint(0, 2, n) * 2 - 1
        idx_a = labels > 0
        idx_b = labels < 0
        X[idx_a] *= (1. + offset)
        Y[idx_a] *= (1. + offset)
        X[idx_b] *= (1. - offset)
        Y[idx_b] *= (1. - offset)

        return asarray(list(zip(X, Y))), labels
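A quick way to eyeball the two noisy quarter-circles and the label-dependent offset, assuming matplotlib is available (the seed value is arbitrary):

    import matplotlib.pyplot as plt
    
    X, labels = sample_circle_data(200, seed_init=1)
    plt.plot(X[labels > 0, 0], X[labels > 0, 1], 'ro')
    plt.plot(X[labels < 0, 0], X[labels < 0, 1], 'bo')
    plt.show()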
Example #26
 def test_log_sum_exp(self):
     X = asarray([0.1, 0.2, 0.3, 0.4])
     direct = log(sum(exp(X)))
     indirect = GPTools.log_sum_exp(X)
     self.assertLessEqual(norm(direct - indirect), 1e-10)
Example #27
 def log_pdf_multiple_points(self, X):
     """
     X is a 2d array
     """
     return asarray([self.log_pdf(x) for x in X])
Example #28
    # prior on theta and posterior target estimate
    theta_prior = Gaussian(mu=0 * ones(dim), Sigma=eye(dim) * 5)
    target = PseudoMarginalHyperparameterDistribution(data, labels,
                                                      n_importance=100, prior=theta_prior,
                                                      ridge=1e-3)
    
    # create sampler
    burnin = 10000
    num_iterations = burnin + 300000
    kernel = GaussianKernel(sigma=22.0)
    sampler = KameleonWindowLearnScale(target, kernel, stop_adapt=burnin)
#    sampler=AdaptiveMetropolisLearnScale(target)
#    sampler=StandardMetropolis(target)
    
    # posterior mode derived by initial tests
    start = asarray([-1., -0.5, -4., -3., -3, -3, -2, -1])
    params = MCMCParams(start=start, num_iterations=num_iterations, burnin=burnin)
    
    # create MCMC chain
    chain = MCMCChain(sampler, params)
    chain.append_mcmc_output(StatisticsOutput(print_from=0, lag=100))
#    chain.append_mcmc_output(PlottingOutput(plot_from=0, lag=500))
    
    # create experiment instance to store results
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep
    experiment = SingleChainExperiment(chain, experiment_dir)
    
    experiment.run()
    
    sigma = GaussianKernel.get_sigma_median_heuristic(experiment.mcmc_chain.samples.T)
    print("median kernel width", sigma)
Example #29
    def graphlab_results(self):
        """
        Based on the fixed data from above, calls graphlab and asserts that it
        outputs some fixed beta coefficients after convergence.
        This is a nasty unit test which compiles graphlab and runs it locally.
        """

        # run graphlab as system call from folder within
        folder = (
            ".."
            + os.sep
            + ".."
            + os.sep
            + ".."
            + os.sep
            + ".."
            + os.sep
            + "debug"
            + os.sep
            + "apps"
            + os.sep
            + "kernelbp"
            + os.sep
        )

        # cd to kernelbp binary directory
        os.chdir(folder)

        # compile kernelbp in case it hasn't been done yet
        self.assertEqual(subprocess.call(["make"]), 0)

        # run command:
        # mpiexec -n 1 kernelbp --graph_filename ../../../apps/kernelbp/python_precompute_kernel_matrices/unitttests/graph_test1/graph.txt --output_filename outfile.txt --engine sync
        command = "mpiexec"
        args = [
            "-n",
            "1",
            "kernelbp",
            "--graph_filename",
            "../../../apps/kernelbp/python_precompute_kernel_matrices/graph_test1/graph.txt",
        ]
        self.assertEqual(subprocess.call([command] + args), 0)

        # open result file
        try:
            f = open("outfile.txt")
            lines = [line.strip() for line in f.readlines()]
            f.close()
        except IOError:
            self.fail("Graphlab output file could not be opened.")

        # read betas
        betas = {}
        idx = 0
        while idx < len(lines):
            # cut off () of the edge pair of graphlab output
            vertices = lines[idx].strip(")").strip("(").split(",")

            # read all lines until next vector or end
            idx += 1
            temp = []
            while idx < len(lines) and lines[idx][0] != "(":
                temp.append(float(lines[idx]))
                idx += 1

            betas[(int(vertices[0]), int(vertices[1]))] = asarray(temp)

        # matlab results
        reference_betas = {}
        reference_betas[(2, 1)] = asarray([0.667559, 0.298557, -0.682077])
        reference_betas[(3, 1)] = asarray([0.789823, 0.045293, -0.611660])
        reference_betas[(1, 2)] = asarray([0.770158, -0.621991, 0.141367])
        reference_betas[(3, 2)] = asarray([0.885537, -0.310062, -0.345956])
        reference_betas[(1, 3)] = asarray([0.808071, -0.583016, 0.084344])
        reference_betas[(2, 3)] = asarray([0.821792, 0.072258, -0.565188])
        reference_betas[(5, 3)] = asarray([0.743932, -0.011267, -0.668161])
        reference_betas[(2, 4)] = asarray([0.388615, 0.532370, -0.752037])
        reference_betas[(3, 4)] = asarray([0.414404, 0.483776, -0.770863])
        reference_betas[(3, 5)] = asarray([0.746400, 0.609564, 0.267055])

        # assert that these are close to the graphlab output
        for edge, beta in reference_betas.items():
            difference = norm(beta - betas[edge])
            self.assertLessEqual(difference, 1.5e-2)
Example #30
 def log_lik_hessian_vector(self, y, f):
     s = asarray([min(x, 0) for x in f])
     d2lp = -exp(2 * s - f) / (exp(s) + exp(s - f)) ** 2
     return d2lp
Example #31
 def log_lik_vector(self, y, f):
     s = -y * f
     ps = asarray([min(x, 0) for x in s])
     lp = -(ps + log(exp(-ps) + exp(s - ps)))
     return lp
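The clipping trick above evaluates log p(y|f) = -log(1 + exp(-y*f)) without overflow: with ps = min(s, 0) and s = -y*f, the identity -(ps + log(exp(-ps) + exp(s - ps))) = -log(1 + exp(s)) holds exactly. A quick numerical check against the naive formula on moderate inputs (the naive form would overflow for large |f|); assumes numpy:

    from numpy import allclose, asarray, exp, log
    
    y = asarray([1., -1., 1.])
    f = asarray([0.5, -2.0, 3.0])
    s = -y * f
    ps = asarray([min(x, 0) for x in s])
    stable = -(ps + log(exp(-ps) + exp(s - ps)))
    naive = -log(1 + exp(-y * f))
    assert allclose(stable, naive)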
Example #32
 def log_likelihood_multiple(self, F):
     all = self.likelihood.log_lik_vector_multiple(self.y, F)
     return asarray([sum(a) for a in all])
Example #33
 def test_log_sum_exp(self):
     X = asarray([0.1, 0.2, 0.3, 0.4])
     direct = log(sum(exp(X)))
     indirect = GPTools.log_sum_exp(X)
     self.assertLessEqual(norm(direct - indirect), 1e-10)
Example #34
 def precompute_likelihood_estimates(self, tau, kappa):
     logging.debug("Entering")
     estimates = asarray([OzonePosterior.log_likelihood(self, tau, kappa) for _ in range(self.num_estimates)])
     logging.debug("Leaving")
     return estimates
Example #35
 def log_lik_grad_vector(self, y, f):
     s = asarray([min(x, 0) for x in f])
     # p = sigmoid(f), computed without overflow
     p = exp(s) / (exp(s) + exp(s - f))
     dlp = (y + 1) / 2 - p
     return dlp
Example #36
 def log_likelihood_multiple(self, F):
     all = self.likelihood.log_lik_vector_multiple(self.y, F)
     return asarray([sum(a) for a in all])
Example #37
 def log_lik_vector_multiple(self, y, F):
     return asarray([self.log_lik_vector(y, f) for f in F])
Example #38
 def log_lik_vector_multiple(self, y, F):
     S = -y * F
     PS = asarray([asarray([min(x, 0) for x in s]) for s in S])
     LP = -(PS + log(exp(-PS) + exp(S - PS)))
     return LP
Example #39
    def fixed_data(self):
        """
        Uses some fixed data from the ToyModel and pre-computes all matrices.
        Results are asserted to be correct (against Matlab implementation) and
        output files can be used to test the KernelBP implementation.
        """
        model = ToyModel()
        graph = model.get_moralised_graph()

        # one observation at node 4
        observations = {4: 0.0}

        # directed edges for kernel BP implementation
        edges = model.extract_edges(observations)

        print "graph:", graph
        print "observations:", observations
        print "edges:", edges

        # we sample the data jointly, so edges will share data along vertices
        joint_data = {}
        joint_data[1] = [-0.274722354853981, 0.044011207316815, 0.073737451640458]
        joint_data[2] = [-0.173264814517908, 0.213918664844409, 0.123246012188621]
        joint_data[3] = [-0.348879413536605, -0.081766464397055, -0.117171083361484]
        joint_data[4] = [-0.014012058355118, -0.145789276405117, -0.317649695308685]
        joint_data[5] = [-0.291794859908481, 0.260902212951398, -0.276258182225143]

        # generate data in format that works for the dense matrix class, i.e., a pair of
        # points for every edge
        data = {}
        for edge in edges:
            # only sample once per undirected edge
            inverse_edge = (edge[1], edge[0])
            data[edge] = (joint_data[edge[0]], joint_data[edge[1]])
            data[inverse_edge] = (joint_data[edge[1]], joint_data[edge[0]])

        # Gaussian kernel used in matlab files
        kernel = GaussianKernel(sigma=sqrt(0.15))

        # use the example class for dense matrix data that can be stored in memory

        precomputer = PrecomputeDenseMatrixKernelBP(
            graph, edges, data, observations, kernel, reg_lambda=0.1, output_filename=self.output_filename
        )

        precomputer.precompute()

        # go through all the files and make sure they contain the correct matrices

        # files created by matlab implementation
        filenames = [
            "1->2->3_non_obs_kernel.txt",
            "1->2->4_non_obs_kernel.txt",
            "1->3->2_non_obs_kernel.txt",
            "1->3->4_non_obs_kernel.txt",
            "1->3->5_non_obs_kernel.txt",
            "2->1->3_non_obs_kernel.txt",
            "2->3->1_non_obs_kernel.txt",
            "2->3->4_non_obs_kernel.txt",
            "2->3->5_non_obs_kernel.txt",
            "2->4->3_non_obs_kernel.txt",
            "3->1->2_non_obs_kernel.txt",
            "3->2->1_non_obs_kernel.txt",
            "3->2->4_non_obs_kernel.txt",
            "3->4->2_non_obs_kernel.txt",
            "4->2->1_non_obs_kernel.txt",
            "4->2->3_non_obs_kernel.txt",
            "4->3->1_non_obs_kernel.txt",
            "4->3->2_non_obs_kernel.txt",
            "4->3->5_non_obs_kernel.txt",
            "5->3->1_non_obs_kernel.txt",
            "5->3->2_non_obs_kernel.txt",
            "5->3->4_non_obs_kernel.txt",
            "3->4_obs_kernel.txt",
            "2->4_obs_kernel.txt",
            "1->2_L_s.txt",
            "1->3_L_s.txt",
            "2->1_L_s.txt",
            "2->3_L_s.txt",
            "2->4_L_s.txt",
            "2->4_L_t.txt",
            "3->1_L_s.txt",
            "3->2_L_s.txt",
            "3->4_L_s.txt",
            "3->4_L_t.txt",
            "3->5_L_s.txt",
            "5->3_L_s.txt",
        ]

        # from matlab implementation
        matrices = {
            "2->1->3_non_obs_kernel.txt": asarray(
                [[1.000000, 0.712741, 0.667145], [0.712741, 1.000000, 0.997059], [0.667145, 0.997059, 1.000000]]
            ),
            "3->1->2_non_obs_kernel.txt": asarray(
                [[1.000000, 0.712741, 0.667145], [0.712741, 1.000000, 0.997059], [0.667145, 0.997059, 1.000000]]
            ),
            "1->2->3_non_obs_kernel.txt": asarray(
                [[1.000000, 0.606711, 0.745976], [0.606711, 1.000000, 0.972967], [0.745976, 0.972967, 1.000000]]
            ),
            "1->2->4_non_obs_kernel.txt": asarray(
                [[1.000000, 0.606711, 0.745976], [0.606711, 1.000000, 0.972967], [0.745976, 0.972967, 1.000000]]
            ),
            "3->2->1_non_obs_kernel.txt": asarray(
                [[1.000000, 0.606711, 0.745976], [0.606711, 1.000000, 0.972967], [0.745976, 0.972967, 1.000000]]
            ),
            "3->2->4_non_obs_kernel.txt": asarray(
                [[1.000000, 0.606711, 0.745976], [0.606711, 1.000000, 0.972967], [0.745976, 0.972967, 1.000000]]
            ),
            "4->2->1_non_obs_kernel.txt": asarray(
                [[1.000000, 0.606711, 0.745976], [0.606711, 1.000000, 0.972967], [0.745976, 0.972967, 1.000000]]
            ),
            "4->2->3_non_obs_kernel.txt": asarray(
                [[1.000000, 0.606711, 0.745976], [0.606711, 1.000000, 0.972967], [0.745976, 0.972967, 1.000000]]
            ),
            "1->3->2_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "1->3->4_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "1->3->5_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "2->3->1_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "2->3->4_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "2->3->5_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "4->3->1_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "4->3->2_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "4->3->5_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "5->3->1_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "5->3->2_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "5->3->4_non_obs_kernel.txt": asarray(
                [[1.000000, 0.788336, 0.836137], [0.788336, 1.000000, 0.995830], [0.836137, 0.995830, 1.000000]]
            ),
            "2->4->3_non_obs_kernel.txt": asarray(
                [[1.000000, 0.943759, 0.735416], [0.943759, 1.000000, 0.906238], [0.735416, 0.906238, 1.000000]]
            ),
            "3->4->2_non_obs_kernel.txt": asarray(
                [[1.000000, 0.943759, 0.735416], [0.943759, 1.000000, 0.906238], [0.735416, 0.906238, 1.000000]]
            ),
            "2->4_obs_kernel.txt": asarray([[0.999346], [0.931603], [0.714382]]),
            "2->4_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.578476, 0.874852, 0.000000], [0.711260, 0.641846, 0.426782]]
            ),
            "2->4_L_t.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.899839, 0.538785, 0.000000], [0.701191, 0.510924, 0.589311]]
            ),
            "3->4_obs_kernel.txt": asarray([[0.999346], [0.931603], [0.714382]]),
            "3->4_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.751649, 0.731453, 0.000000], [0.797226, 0.542204, 0.412852]]
            ),
            "3->4_L_t.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.899839, 0.538785, 0.000000], [0.701191, 0.510924, 0.589311]]
            ),
            "2->1_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.578476, 0.874852, 0.000000], [0.711260, 0.641846, 0.426782]]
            ),
            "3->1_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.751649, 0.731453, 0.000000], [0.797226, 0.542204, 0.412852]]
            ),
            "1->2_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.679572, 0.798863, 0.000000], [0.636098, 0.706985, 0.442211]]
            ),
            "3->2_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.751649, 0.731453, 0.000000], [0.797226, 0.542204, 0.412852]]
            ),
            "1->3_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.679572, 0.798863, 0.000000], [0.636098, 0.706985, 0.442211]]
            ),
            "2->3_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.578476, 0.874852, 0.000000], [0.711260, 0.641846, 0.426782]]
            ),
            "5->3_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.344417, 0.990645, 0.000000], [0.952696, 0.054589, 0.435191]]
            ),
            "3->5_L_s.txt": asarray(
                [[1.048809, 0.000000, 0.000000], [0.751649, 0.731453, 0.000000], [0.797226, 0.542204, 0.412852]]
            ),
        }

        assert len(filenames) == len(matrices)

        for filename in filenames:
            self.assertTrue(self.assert_file_matrix(self.output_folder + filename, matrices[filename]))
Example #40
 def log_lik_vector_multiple(self, y, F):
     S = -y * F
     PS = asarray([asarray([min(x, 0) for x in s]) for s in S])
     LP = -(PS + log(exp(-PS) + exp(S - PS)))
     return LP
Example #41
 def log_lik_vector(self, y, f):
     s = -y * f
     ps = asarray([min(x, 0) for x in s])
     lp = -(ps + log(exp(-ps) + exp(s - ps)))
     return lp
Example #42
    def find_mode_newton(self, return_full=False):
        """
        Newton search for mode of p(y|f)p(f)
        
        from GP book, algorithm 3.1, added step size
        """
        K = self.gp.K

        if self.newton_start is None:
            f = zeros(len(K))
        else:
            f = self.newton_start

        if return_full:
            steps = [f]

        iteration = 0
        norm_difference = inf
        objective_value = -inf

        while iteration < self.newton_max_iterations and norm_difference > self.newton_epsilon:
            # from GP book, algorithm 3.1, added step size
            # scale log_lik_grad_vector and K^-1 f = a

            w = -self.gp.likelihood.log_lik_hessian_vector(self.gp.y, f)
            w_sqrt = sqrt(w)

            # diag(w_sqrt).dot(K.dot(diag(w_sqrt))) == (K.T*w_sqrt).T*w_sqrt
            L = cholesky(eye(len(K)) + (K.T * w_sqrt).T * w_sqrt)
            b = f * w + self.newton_step * \
                self.gp.likelihood.log_lik_grad_vector(self.gp.y, f)

            # a=b-diag(w_sqrt).dot(inv(eye(len(K)) + (K.T*w_sqrt).T*w_sqrt).dot(diag(w_sqrt).dot(K.dot(b))))
            a = (w_sqrt * (K.dot(b)))
            a = solve_triangular(L, a, lower=True)
            a = solve_triangular(L.T, a, lower=False)
            a = w_sqrt * a
            a = b - a

            f_new = K.dot(self.newton_step * a)

            # convergence stuff and next iteration
            objective_value_new = -0.5 * a.T.dot(f) + \
                                sum(self.gp.likelihood.log_lik_vector(self.gp.y, f))
            norm_difference = norm(f - f_new)

            if objective_value_new > objective_value:
                f = f_new
                if return_full:
                    steps.append(f)
            else:
                self.newton_step /= 2

            iteration += 1
            objective_value = objective_value_new

        self.computed = True

        if return_full:
            return f, L, asarray(steps)
        else:
            return f
Example #43
 def log_lik_grad_vector(self, y, f):
     s = asarray([min(x, 0) for x in f])
     # p = sigmoid(f), computed without overflow
     p = exp(s) / (exp(s) + exp(s - f))
     dlp = (y + 1) / 2 - p
     return dlp
Example #44
 def log_lik_vector_multiple(self, y, F):
     return asarray([self.log_lik_vector(y, f) for f in F])
Example #45
    # prior on theta and posterior target estimate
    theta_prior = Gaussian(mu=0 * ones(dim), Sigma=eye(dim) * 5)
    target = PseudoMarginalHyperparameterDistribution(data, labels,
                                                      n_importance=100, prior=theta_prior,
                                                      ridge=1e-3)

    # create sampler
    burnin = 10000
    num_iterations = burnin + 300000
    kernel = GaussianKernel(sigma=22.0)
    sampler = KameleonWindowLearnScale(target, kernel, stop_adapt=burnin)
    #    sampler=AdaptiveMetropolisLearnScale(target)
    #    sampler=StandardMetropolis(target)

    # posterior mode derived by initial tests
    start = asarray([-1., -0.5, -4., -3., -3, -3, -2, -1])
    params = MCMCParams(start=start,
                        num_iterations=num_iterations,
                        burnin=burnin)

    # create MCMC chain
    chain = MCMCChain(sampler, params)
    chain.append_mcmc_output(StatisticsOutput(print_from=0, lag=100))
    #    chain.append_mcmc_output(PlottingOutput(plot_from=0, lag=500))

    # create experiment instance to store results
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep
    experiment = SingleChainExperiment(chain, experiment_dir)

    experiment.run()
Example #46
 def log_lik_hessian_vector(self, y, f):
     s = asarray([min(x, 0) for x in f])
     d2lp = -exp(2 * s - f) / (exp(s) + exp(s - f))**2
     return d2lp
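As a sanity check on the stabilised expression above: for the logit likelihood, the Hessian of log p(y|f) is -sigma(f) * (1 - sigma(f)) with sigma the logistic function, independent of y. A short verification, assuming numpy:

    from numpy import allclose, asarray, exp
    
    f = asarray([-3.0, 0.0, 2.5])
    s = asarray([min(x, 0) for x in f])
    stable = -exp(2 * s - f) / (exp(s) + exp(s - f)) ** 2
    sigma = 1 / (1 + exp(-f))
    assert allclose(stable, -sigma * (1 - sigma))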