Code Example #1
    def sample_posterior_M(self, thetaval):
        # Requires: numpy as np, scipy.spatial.distance.{pdist, squareform},
        # scipy.stats.{multivariate_normal, bernoulli}, and GaussianKernel.
        n = np.shape(self.X)[0]
        m = np.shape(self.Z)[0]
        K = GaussianKernel(thetaval)
        Kxz = K.kernel(self.X, self.Z)
        Kyz = K.kernel(self.Y, self.Z)
        G = Kxz - Kyz  # Compute the observations: feature differences at the points Z
        Delta_val = np.mean(G, axis=0)
        Dzz = squareform(pdist(self.Z, 'sqeuclidean'))
        # Compute the R matrix (prior covariance on Z), with a small jitter for stability
        R = np.exp(-Dzz / float(4 * thetaval**2)) + 1e-8 * np.eye(m)
        H = np.eye(n) - np.ones((n, n)) / float(n)  # centering matrix
        if self.ifindependent:
            Sigma1 = Kxz.T.dot(H.dot(Kxz)) / float(n**2)
            Sigma2 = Kyz.T.dot(H.dot(Kyz)) / float(n**2)
            Sigma = Sigma1 + Sigma2 + 1e-8 * np.eye(m)
        else:
            Sigma = G.T.dot(H.dot(G)) / float(n**2) + 1e-8 * np.eye(m)

        # Bayes factor of M0 (cov Sigma) against M1 (cov R + Sigma), both zero-mean
        BF = multivariate_normal.pdf(Delta_val, cov=Sigma) / \
            multivariate_normal.pdf(Delta_val, cov=R + Sigma)
        Prob_M1 = 1.0 / (BF + 1.0)
        M = int(bernoulli.rvs(Prob_M1))  # sample the model indicator
        return BF, M
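To make the model-sampling step above concrete, here is a minimal stand-alone sketch of the same Bayes-factor computation. The toy values for Delta_val, Sigma and R are made up for illustration and are not taken from the source:

import numpy as np
from scipy.stats import multivariate_normal, bernoulli

# Toy stand-ins for Delta_val, Sigma and R computed in the method above (assumed values).
m = 3
Delta_val = np.array([0.05, -0.02, 0.07])   # observed mean feature discrepancy
Sigma = 0.01 * np.eye(m)                    # its estimated covariance (model M0: no difference)
R = 0.1 * np.eye(m)                         # prior covariance of the difference under model M1

# Bayes factor of M0 (Delta ~ N(0, Sigma)) against M1 (Delta ~ N(0, R + Sigma)),
# then the posterior probability of M1 under equal prior odds, and a sampled indicator.
BF = multivariate_normal.pdf(Delta_val, cov=Sigma) / \
    multivariate_normal.pdf(Delta_val, cov=R + Sigma)
Prob_M1 = 1.0 / (BF + 1.0)
M = int(bernoulli.rvs(Prob_M1))
print(BF, Prob_M1, M)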
Code Example #2
File: RFF2.py  Project: Mick116/Dist-Regression
import numpy as np
# GaussianKernel and GaussianBagKernel are assumed to come from the project's kernel modules.

# The snippet starts mid-function; the signature and the `sd` draw below are reconstructed
# as assumptions (the source does not show how `sd` is generated).
def sam1d_gen(l, n):
    sample = list()
    entro = list()
    m0 = 0.0
    sd = np.random.uniform(0.5, 2.0, l)  # assumed: one standard deviation per bag

    for ii in np.arange(l):
        pois = np.random.normal(m0, sd[ii], n)  # n samples from N(m0, sd[ii]**2)
        en = 0.5 * np.log(2 * np.pi * np.e * sd[ii]**2)  # differential entropy of that Gaussian
        sample.append(pois)
        entro.append(en)

    return sample, entro


sam1d_tr, entro1d_tr = sam1d_gen(5, 10)
print(sam1d_tr)
#print(entro1d_tr)
sam1d_tt, entro1d_tt = sam1d_gen(2, 5)

##########################################################
# conduct the ridge regression
data_gamma = 1.0
bag_gamma = 1.0
data_kernel = GaussianKernel(data_gamma)
print(data_kernel.kernel(sam1d_tr))

bag_kernel = GaussianBagKernel(data_kernel, bag_gamma)
#standard distribution regression - computes full kernel matrices
#coeff,ypred=bag_kernel.ridge_regress(sam1d_tr,entro1d_tr,lmbda=0.01,Xtst=sam1d_tt)
#or distribution regression with random features
#bag_kernel.rff_generate(50,60,dim=dim) #50 random features for bag_kernel, 60 for data_kernel
#coeff,ypred=bag_kernel.ridge_regress_rff(baglistX,y,Xtst=baglistXtst)
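
The ridge-regression calls are left commented out in the source. As an illustration of the same idea, the sketch below performs distribution regression with plain NumPy: each bag is summarised by the mean of its samples' random Fourier features (an approximate kernel mean embedding), and ridge regression maps these bag features to the bag entropies. The helper bag_embedding, the feature count D, the bandwidth sigma and the regulariser lmbda are illustrative assumptions rather than the project's API, and a linear kernel on the embeddings is used instead of the second-level GaussianBagKernel.

import numpy as np

D, sigma = 60, 1.0                       # assumed number of random features and kernel bandwidth
rng = np.random.RandomState(0)
W = rng.normal(0.0, 1.0 / sigma, D)      # random frequencies for 1-d inputs
b = rng.uniform(0.0, 2.0 * np.pi, D)     # random phases

def bag_embedding(bag):
    # Mean of the random Fourier features over one bag of 1-d samples;
    # phi(x) = sqrt(2/D) * cos(W*x + b) approximates the Gaussian-kernel feature map.
    phi = np.sqrt(2.0 / D) * np.cos(np.outer(bag, W) + b)
    return phi.mean(axis=0)

Phi_tr = np.array([bag_embedding(bag) for bag in sam1d_tr])   # (number of bags, D)
Phi_tt = np.array([bag_embedding(bag) for bag in sam1d_tt])
y_tr = np.array(entro1d_tr)

lmbda = 0.01
coeff = np.linalg.solve(Phi_tr.T.dot(Phi_tr) + lmbda * np.eye(D), Phi_tr.T.dot(y_tr))
ypred = Phi_tt.dot(coeff)                # predicted entropies for the test bags
print(ypred)
print(entro1d_tt)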