Code Example #1
    def sample_conditional(self, index):
        if index < 0 or index >= self.dimension:
            raise ValueError("Conditional index out of bounds")
        
        # all indices but the current
        cond_inds = hstack((arange(0, index), arange(index + 1, self.dimension)))
#         print "conditioning on index %d" % index
#         print "other indices:", cond_inds
        
        # partition the joint Gaussian into x (this index) and y (the rest);
        # factor Sigma_yy once so no explicit matrix inversion is needed
        mu_x = self.full_target.mu[index]
        Sigma_xx = self.full_Sigma[index, index]
        mu_y = self.full_target.mu[cond_inds]
        Sigma_yy = self.full_Sigma[cond_inds][:, cond_inds]  # (d-1)x(d-1) submatrix; [cond_inds, cond_inds] would only pick its diagonal
        L_yy = cholesky(Sigma_yy)
        Sigma_xy = self.full_Sigma[index, cond_inds]
        Sigma_yx = self.full_Sigma[cond_inds, index]
        
        y = self.current_state[cond_inds]
        
        # mu=mu_x+Sigma_xy Sigma_yy^(-1)(y-mu_y)
        mu = mu_x + Sigma_xy.dot(MatrixTools.cholesky_solve(L_yy, y - mu_y))
        
        # Sigma = Sigma_xx - Sigma_xy Sigma_yy^(-1) Sigma_yx = Sigma_xx - Sigma_xy L_yy^(-T) L_yy^(-1) Sigma_yx
        Sigma = Sigma_xx - Sigma_xy.dot(MatrixTools.cholesky_solve(L_yy, Sigma_yx))
        
        # return sample from x|y
        conditional_sample = randn() * sqrt(Sigma) + mu
        return conditional_sample
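
For reference, the conditional of a joint Gaussian is again Gaussian, with mean mu_x + Sigma_xy Sigma_yy^(-1) (y - mu_y) and variance Sigma_xx - Sigma_xy Sigma_yy^(-1) Sigma_yx, exactly the quantities computed above. The standalone sketch below applies these formulas to a 2D Gaussian with plain NumPy; it does not use the project's MatrixTools.cholesky_solve, and the covariance and observed value are illustrative only.

import numpy as np

# illustrative 2D joint Gaussian (eigenvalues 30 and 1, rotated by 45 degrees)
mu = np.array([0.0, 0.0])
Sigma = np.array([[15.5, 14.5],
                  [14.5, 15.5]])

y = 1.3                           # observed value of the second coordinate
mu_x, mu_y = mu[0], mu[1]
Sigma_xx, Sigma_xy, Sigma_yy = Sigma[0, 0], Sigma[0, 1], Sigma[1, 1]

# conditional mean and variance of the first coordinate given the second
mu_cond = mu_x + Sigma_xy / Sigma_yy * (y - mu_y)
var_cond = Sigma_xx - Sigma_xy / Sigma_yy * Sigma_xy

# one draw from x | y, mirroring the return value of sample_conditional above
sample = np.random.randn() * np.sqrt(var_cond) + mu_cond
print(sample)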
Code Example #2
def main():
    # covariance has stretched eigenvalues and a rotated basis
    Sigma = eye(2)
    Sigma[0, 0] = 30
    Sigma[1, 1] = 1
    theta = -pi / 4
    U = MatrixTools.rotation_matrix(theta)
    Sigma = U.T.dot(Sigma).dot(U)

    gaussian = Gaussian(Sigma=Sigma)
    distribution = GaussianFullConditionals(gaussian, [0., 0.])

    mcmc_sampler = Gibbs(distribution)

    start = zeros(distribution.dimension)
    mcmc_params = MCMCParams(start=start, num_iterations=20000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(plot_times=True, lag=100))
    chain.append_mcmc_output(
        PlottingOutput(distribution=gaussian,
                       plot_from=1,
                       colour_by_likelihood=False,
                       num_samples_plot=0,
                       lag=100))
    chain.run()
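
The Gibbs sampler and MCMCChain above come from the project. As a rough, self-contained illustration of what one Gibbs sweep over the two full conditionals does, the sketch below uses plain NumPy only; the rotation matrix is written out by hand and is merely assumed to correspond to MatrixTools.rotation_matrix, and the loop structure and names are illustrative, not the project's implementation.

import numpy as np

# rotated, stretched 2D covariance analogous to the demo above
theta = -np.pi / 4
U = np.array([[np.cos(theta), -np.sin(theta)],
              [np.sin(theta),  np.cos(theta)]])
Sigma = U.T.dot(np.diag([30.0, 1.0])).dot(U)
mu = np.zeros(2)

num_iterations = 20000
samples = np.zeros((num_iterations, 2))
x = np.zeros(2)                             # start at the origin
for it in range(num_iterations):
    for i in range(2):                      # resample each coordinate from its full conditional
        j = 1 - i                           # the coordinate being conditioned on
        cond_mean = mu[i] + Sigma[i, j] / Sigma[j, j] * (x[j] - mu[j])
        cond_var = Sigma[i, i] - Sigma[i, j] ** 2 / Sigma[j, j]
        x[i] = cond_mean + np.sqrt(cond_var) * np.random.randn()
    samples[it] = x

print(samples.mean(0))    # should be close to mu
print(np.cov(samples.T))  # should be close to Sigma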
Code Example #3
    def __init__(self,
                 mu=array([0, 0]),
                 Sigma=eye(2),
                 is_cholesky=False,
                 ell=None):
        Distribution.__init__(self, len(Sigma))

        assert (len(shape(mu)) == 1)
        assert (max(shape(Sigma)) == len(mu))
        self.mu = mu
        self.ell = ell
        if is_cholesky:
            self.L = Sigma
            if ell is None:
                assert (shape(Sigma)[0] == shape(Sigma)[1])
            else:
                assert (shape(Sigma)[1] == ell)
        else:
            assert (shape(Sigma)[0] == shape(Sigma)[1])
            if ell is not None:
                self.L, _, _ = MatrixTools.low_rank_approx(Sigma, ell)
                self.L = self.L.T
                assert (shape(self.L)[1] == ell)
            else:
                try:
                    self.L = cholesky(Sigma)
                except LinAlgError:
                    # crude fallback: if the Cholesky fails only because of rounding errors,
                    # add a small jitter to the diagonal and retry
                    self.L = cholesky(Sigma + eye(len(Sigma)) * 1e-5)
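
Once self.L satisfies L L^T = Sigma (or is a low-rank factor), samples are typically generated by an affine transform of standard normals, x = mu + L z. The sketch below shows that pattern with plain NumPy; sample_gaussian is a hypothetical helper written for illustration, not the project's actual Gaussian.sample implementation.

import numpy as np

def sample_gaussian(mu, L, n=1):
    # draw n samples from N(mu, Sigma), where L is lower triangular with L L^T = Sigma
    Z = np.random.randn(n, len(mu))
    return mu + Z.dot(L.T)

mu = np.zeros(2)
Sigma = np.array([[30.0, 0.0], [0.0, 1.0]])
L = np.linalg.cholesky(Sigma)
X = sample_gaussian(mu, L, n=1000)
print(X.mean(0))    # close to mu
print(np.cov(X.T))  # close to Sigma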
Code Example #4
def main():
    # covariance has stretched eigenvalues and a rotated basis
    Sigma = eye(2)
    Sigma[0, 0] = 30
    Sigma[1, 1] = 1
    theta = -pi / 4
    U = MatrixTools.rotation_matrix(theta)
    Sigma = U.T.dot(Sigma).dot(U)
    
    gaussian = Gaussian(Sigma=Sigma)
    distribution = GaussianFullConditionals(gaussian, [0., 0.])
    
    mcmc_sampler = Gibbs(distribution)
    
    start = zeros(distribution.dimension)
    mcmc_params = MCMCParams(start=start, num_iterations=20000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)
    
    chain.append_mcmc_output(StatisticsOutput(plot_times=True, lag=100))
    chain.append_mcmc_output(PlottingOutput(distribution=gaussian, plot_from=1,
                                            colour_by_likelihood=False,
                                            num_samples_plot=0, lag=100))
    chain.run()
Code Example #5
def main():
    # covariance has stretched eigenvalues and a rotated basis
    Sigma1 = eye(2)
    Sigma1[0, 0] = 30.0
    Sigma1[1, 1] = 1.0
    Sigma2 = Sigma1.copy()  # copy, so the next line does not silently change Sigma1 as well
    Sigma2[0, 0] = 20.0
    theta = -pi / 4
    U = MatrixTools.rotation_matrix(theta)
    Sigma1 = U.T.dot(Sigma1).dot(U)
    Sigma2 = U.T.dot(Sigma2).dot(U)
    
    gaussian1 = Gaussian(Sigma=Sigma1)
    gaussian2 = Gaussian(mu=array([1., 0.]), Sigma=Sigma1)
    numTrials=500
    vanillap=empty((numTrials,2,2))
    blockp=empty((numTrials,2,2))
    wildp=empty((numTrials,2,2))
    
    #     f = open("/home/dino/git/test_results.dat", "r")
    #     vanillarej=load(f)
    #     blockrej=load(f)
    #     wildrej=load(f)
    #     f.close()
    #     
    for i in range(numTrials):
        print 'trial', i
        vanillap[i],blockp[i],wildp[i]=all_tests(gaussian1,gaussian2,n=200)
    f = open("/nfs/home2/dino/git/test_results.dat", "w")
    # divide by float(numTrials) so Python 2 integer division does not truncate the rates to zero
    vanillarej=sum(vanillap<0.05,0)/float(numTrials)
    blockrej=sum(blockp<0.05,0)/float(numTrials)
    wildrej=sum(wildp<0.05,0)/float(numTrials)
    dump(vanillarej, f)
    dump(blockrej, f)
    dump(wildrej, f)
    f.close()
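
The rejection rates are dumped in the order vanilla, block, wild. Assuming dump and load refer to the standard pickle module (as the symmetric load calls in the commented-out block suggest), a minimal sketch for reading the results back from the path hard-coded above could look like this.

import pickle

# read the three 2x2 rejection-rate arrays back in the order they were dumped
with open("/nfs/home2/dino/git/test_results.dat", "rb") as f:
    vanillarej = pickle.load(f)
    blockrej = pickle.load(f)
    wildrej = pickle.load(f)

print(vanillarej)
print(blockrej)
print(wildrej)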
Code Example #6
def main():
    # covariance has stretched eigenvalues and a rotated basis
    Sigma1 = eye(2)
    Sigma1[0, 0] = 30.0
    Sigma1[1, 1] = 1.0
    Sigma2 = Sigma1.copy()  # copy, so the next line does not silently change Sigma1 as well
    Sigma2[0, 0] = 20.0
    theta = -pi / 4
    U = MatrixTools.rotation_matrix(theta)
    Sigma1 = U.T.dot(Sigma1).dot(U)
    Sigma2 = U.T.dot(Sigma2).dot(U)

    gaussian1 = Gaussian(Sigma=Sigma1)
    gaussian2 = Gaussian(mu=array([1.0, 0.0]), Sigma=Sigma1)
    numTrials = 500
    vanillap = empty((numTrials, 2, 2))
    blockp = empty((numTrials, 2, 2))
    wildp = empty((numTrials, 2, 2))

    #     f = open("/home/dino/git/test_results.dat", "r")
    #     vanillarej=load(f)
    #     blockrej=load(f)
    #     wildrej=load(f)
    #     f.close()
    #
    for i in range(numTrials):
        print "trial", i
        vanillap[i], blockp[i], wildp[i] = all_tests(gaussian1, gaussian2, n=200)
    f = open("/nfs/home2/dino/git/test_results.dat", "w")
    # divide by float(numTrials) so Python 2 integer division does not truncate the rates to zero
    vanillarej = sum(vanillap < 0.05, 0) / float(numTrials)
    blockrej = sum(blockp < 0.05, 0) / float(numTrials)
    wildrej = sum(wildp < 0.05, 0) / float(numTrials)
    dump(vanillarej, f)
    dump(blockrej, f)
    dump(wildrej, f)
    f.close()
Code Example #7
def main():
    numTrials = 500
    n=200
    Sigma1 = eye(2)
    Sigma1[0, 0] = 30.0
    Sigma1[1, 1] = 1.0
    theta = - pi / 4
    U = MatrixTools.rotation_matrix(theta)
    Sigma1 = U.T.dot(Sigma1).dot(U)
    print Sigma1
    gaussian1 = Gaussian(Sigma=Sigma1)
    gaussian2 = Gaussian(mu=array([1., 0.]), Sigma=Sigma1)
    
    oracle_samples1 = gaussian1.sample(n=n).samples
    oracle_samples2 = gaussian2.sample(n=n).samples
    
    print 'mean1:', mean(oracle_samples1,0)
    print 'mean2:', mean(oracle_samples2,0)
    plot(oracle_samples1[:,0],oracle_samples1[:,1],'b*')
    plot(oracle_samples2[:,0],oracle_samples2[:,1],'r*')
    show()
    distribution1 = GaussianFullConditionals(gaussian1, list(gaussian1.mu))
    distribution2 = GaussianFullConditionals(gaussian2, list(gaussian2.mu))
    
    H0_samples = zeros(numTrials)
    HA_samples = zeros(numTrials)
    mcmc_sampler1 = Gibbs(distribution1)
    mcmc_sampler2 = Gibbs(distribution2)
    burnin = 9000
    thin = 5
    start = zeros(2)
    mcmc_params = MCMCParams(start=start, num_iterations=burnin+thin*n, burnin=burnin)
    sigma = GaussianKernel.get_sigma_median_heuristic(concatenate((oracle_samples1,oracle_samples2),axis=0))
    print 'using bandwidth: ', sigma
    kernel = GaussianKernel(sigma=sigma)
    
    for ii in arange(numTrials):
        start =time.time()
        print 'trial:', ii
        
        oracle_samples1 = gaussian1.sample(n=n).samples
        oracle_samples1a = gaussian1.sample(n=n).samples
        oracle_samples2 = gaussian2.sample(n=n).samples
        
        #         chain1 = MCMCChain(mcmc_sampler1, mcmc_params)
        #         chain1.run()
        #         gibbs_samples1 = chain1.get_samples_after_burnin()
        #         gibbs_samples1 = gibbs_samples1[thin*arange(n)]
        #         
        #         chain1a = MCMCChain(mcmc_sampler1, mcmc_params)
        #         chain1a.run()
        #         gibbs_samples1a = chain1a.get_samples_after_burnin()
        #         gibbs_samples1a = gibbs_samples1a[thin*arange(n)]
        #         
        #         chain2 = MCMCChain(mcmc_sampler2, mcmc_params)
        #         chain2.run()
        #         gibbs_samples2 = chain2.get_samples_after_burnin()
        #         gibbs_samples2 = gibbs_samples2[thin*arange(n)]
        
        
        #         H0_samples[ii]=kernel.estimateMMD(gibbs_samples1,gibbs_samples1a)
        #         HA_samples[ii]=kernel.estimateMMD(gibbs_samples1,gibbs_samples2)
        #         
        H0_samples[ii]=kernel.estimateMMD(oracle_samples1,oracle_samples1a)
        HA_samples[ii]=kernel.estimateMMD(oracle_samples1,oracle_samples2)
        end=time.time()
        print 'time elapsed: ', end-start
        
    f = open("/home/dino/git/mmdIIDTrueSamples.dat", "w")
    dump(H0_samples, f)
    dump(HA_samples, f)
    dump(gaussian1, f)
    dump(gaussian2, f)
    f.close()
    return None
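
kernel.estimateMMD above is the project's estimator. For orientation only, a biased quadratic-time MMD^2 estimate with a Gaussian kernel can be written in plain NumPy as below; the kernel convention exp(-||x - y||^2 / (2 sigma^2)) and the helper names are assumptions and need not match GaussianKernel exactly.

import numpy as np

def gaussian_kernel(X, Y, sigma):
    # Gaussian kernel matrix between the rows of X and Y
    sq_dists = (np.sum(X ** 2, 1)[:, None] + np.sum(Y ** 2, 1)[None, :]
                - 2 * X.dot(Y.T))
    return np.exp(-sq_dists / (2 * sigma ** 2))

def mmd2_biased(X, Y, sigma):
    # biased quadratic-time estimate of MMD^2 between the samples X and Y
    Kxx = gaussian_kernel(X, X, sigma)
    Kyy = gaussian_kernel(Y, Y, sigma)
    Kxy = gaussian_kernel(X, Y, sigma)
    return Kxx.mean() + Kyy.mean() - 2 * Kxy.mean()

X = np.random.randn(200, 2)                          # stand-in for oracle_samples1
Y = np.random.randn(200, 2) + np.array([1.0, 0.0])   # mean-shifted, like gaussian2
print(mmd2_biased(X, Y, sigma=1.0))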
Code Example #8
def main():
    numTrials = 500
    n = 200
    Sigma1 = eye(2)
    Sigma1[0, 0] = 30.0
    Sigma1[1, 1] = 1.0
    theta = -pi / 4
    U = MatrixTools.rotation_matrix(theta)
    Sigma1 = U.T.dot(Sigma1).dot(U)
    print Sigma1
    gaussian1 = Gaussian(Sigma=Sigma1)
    gaussian2 = Gaussian(mu=array([1., 0.]), Sigma=Sigma1)

    oracle_samples1 = gaussian1.sample(n=n).samples
    oracle_samples2 = gaussian2.sample(n=n).samples

    print 'mean1:', mean(oracle_samples1, 0)
    print 'mean2:', mean(oracle_samples2, 0)
    plot(oracle_samples1[:, 0], oracle_samples1[:, 1], 'b*')
    plot(oracle_samples2[:, 0], oracle_samples2[:, 1], 'r*')
    show()
    distribution1 = GaussianFullConditionals(gaussian1, list(gaussian1.mu))
    distribution2 = GaussianFullConditionals(gaussian2, list(gaussian2.mu))

    H0_samples = zeros(numTrials)
    HA_samples = zeros(numTrials)
    mcmc_sampler1 = Gibbs(distribution1)
    mcmc_sampler2 = Gibbs(distribution2)
    burnin = 9000
    thin = 5
    start = zeros(2)
    mcmc_params = MCMCParams(start=start,
                             num_iterations=burnin + thin * n,
                             burnin=burnin)
    sigma = GaussianKernel.get_sigma_median_heuristic(
        concatenate((oracle_samples1, oracle_samples2), axis=0))
    print 'using bandwidth: ', sigma
    kernel = GaussianKernel(sigma=sigma)

    for ii in arange(numTrials):
        start = time.time()
        print 'trial:', ii

        oracle_samples1 = gaussian1.sample(n=n).samples
        oracle_samples1a = gaussian1.sample(n=n).samples
        oracle_samples2 = gaussian2.sample(n=n).samples

        #         chain1 = MCMCChain(mcmc_sampler1, mcmc_params)
        #         chain1.run()
        #         gibbs_samples1 = chain1.get_samples_after_burnin()
        #         gibbs_samples1 = gibbs_samples1[thin*arange(n)]
        #
        #         chain1a = MCMCChain(mcmc_sampler1, mcmc_params)
        #         chain1a.run()
        #         gibbs_samples1a = chain1a.get_samples_after_burnin()
        #         gibbs_samples1a = gibbs_samples1a[thin*arange(n)]
        #
        #         chain2 = MCMCChain(mcmc_sampler2, mcmc_params)
        #         chain2.run()
        #         gibbs_samples2 = chain2.get_samples_after_burnin()
        #         gibbs_samples2 = gibbs_samples2[thin*arange(n)]

        #         H0_samples[ii]=kernel.estimateMMD(gibbs_samples1,gibbs_samples1a)
        #         HA_samples[ii]=kernel.estimateMMD(gibbs_samples1,gibbs_samples2)
        #
        H0_samples[ii] = kernel.estimateMMD(oracle_samples1, oracle_samples1a)
        HA_samples[ii] = kernel.estimateMMD(oracle_samples1, oracle_samples2)
        end = time.time()
        print 'time elapsed: ', end - start

    f = open("/home/dino/git/mmdIIDTrueSamples.dat", "w")
    dump(H0_samples, f)
    dump(HA_samples, f)
    dump(gaussian1, f)
    dump(gaussian2, f)
    f.close()
    return None
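
GaussianKernel.get_sigma_median_heuristic above is the project's bandwidth helper. A common variant of the median heuristic sets sigma to the median pairwise Euclidean distance of the pooled samples; the plain-NumPy sketch below shows that variant as an assumption, and the project's exact convention (squared vs. unsquared distances, scaling factors) may differ.

import numpy as np

def median_heuristic_sigma(Z):
    # median of all pairwise Euclidean distances between rows of the pooled sample Z
    sq_norms = np.sum(Z ** 2, 1)
    sq_dists = sq_norms[:, None] + sq_norms[None, :] - 2 * Z.dot(Z.T)
    sq_dists = np.maximum(sq_dists, 0)                 # guard against small negative values
    off_diag = sq_dists[~np.eye(len(Z), dtype=bool)]
    return np.sqrt(np.median(off_diag))

Z = np.vstack([np.random.randn(200, 2),
               np.random.randn(200, 2) + np.array([1.0, 0.0])])
print(median_heuristic_sigma(Z))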
Code Example #9
File: Gaussian.py Project: karlnapf/kameleon-mcmc
 def __init__(self, mu=array([0, 0]), Sigma=eye(2), is_cholesky=False, ell=None):
     Distribution.__init__(self, len(Sigma))
     
     assert(len(shape(mu)) == 1)
     assert(max(shape(Sigma)) == len(mu))
     self.mu = mu
     self.ell = ell
     if is_cholesky: 
         self.L = Sigma
         if ell is None:
             assert(shape(Sigma)[0] == shape(Sigma)[1])
         else:
             assert(shape(Sigma)[1] == ell)
     else: 
         assert(shape(Sigma)[0] == shape(Sigma)[1])
         if ell is not None:
             self.L, _, _ = MatrixTools.low_rank_approx(Sigma, ell)
             self.L = self.L.T
             assert(shape(self.L)[1] == ell)
         else:
             try:
                 self.L = cholesky(Sigma)
             except LinAlgError:
                 # crude fallback: if the Cholesky fails only because of rounding errors,
                 # add a small jitter to the diagonal and retry
                 self.L = cholesky(Sigma+eye(len(Sigma))*1e-5)