Example #1
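# NOTE: this example picks up mid-script. The imports below are assumed: the
# samcnet modules follow Example #4, and numpy/pylab are guesses based on the
# `np`/`p` handles used here. seed, N, D, the raw and normalized training/test
# arrays, and the errors/output dicts are all defined earlier in the original
# script; GaussianBayes/GaussianCls come from elsewhere in samcnet (their
# import path is not shown in these examples).
import numpy as np
import pylab as p
import samcnet.mh as mh
from samcnet.mixturepoisson import *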
# Class labels: first half 0s, second half 1s (integer division keeps the sizes integral).
labels = np.hstack((np.zeros(N // 2), np.ones(N // 2)))
bayes0 = GaussianBayes(np.zeros(D), 1, 8, np.eye(D) * 3, norm_trn_data0)
bayes1 = GaussianBayes(np.zeros(D), 1, 8, np.eye(D) * 3, norm_trn_data1)

# Gaussian Analytic
gc = GaussianCls(bayes0, bayes1)
errors['gauss'] = gc.approx_error_data(norm_tst_data, labels)
print("Gaussian Analytic error: %f" % errors['gauss'])

# MPM Model
numlam = 100
dist0 = MPMDist(trn_data0, kmax=1, priorkappa=80, lammove=0.02, mumove=0.1)
dist1 = MPMDist(trn_data1, kmax=1, priorkappa=80, lammove=0.02, mumove=0.1)
mpm = MPMCls(dist0, dist1)
# Metropolis-Hastings run over the MPM posterior (burn-in 100, thinning 20).
mhmc = mh.MHRun(mpm, burn=100, thin=20)
mhmc.sample(2e3, verbose=False)
errors['mpm'] = mpm.approx_error_data(mhmc.db, tst_data, labels, numlam=numlam)
print("MPM Sampler error: %f" % errors['mpm'])

# Record the sampler's local-move acceptance rate and the RNG seed for this run.
output['acceptance'] = float(mhmc.accept_loc) / mhmc.total_loc
output['seed'] = seed

p.figure()


def myplot(ax, g, data0, data1, gext):
    ax.plot(data0[:, 0], data0[:, 1], 'g.', label='0', alpha=0.3)
    ax.plot(data1[:, 0], data1[:, 1], 'r.', label='1', alpha=0.3)
    ax.legend(fontsize=8, loc='best')
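# The g and gext arguments are unused in the body shown; in the full script
# they presumably hold classifier output evaluated on a grid plus the grid
# extent (cf. the commented get_grid_data call in Example #3), drawn under the
# scatter points with something like:
#
#     ax.imshow(g, extent=gext, origin='lower', aspect='auto')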
Example #2
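# As in Example #1, trn_data0/1, tst_data, tst_labels, errors, iters and the
# samcnet.mh module (as `mh`) are assumed to be defined earlier; `up` is
# presumably a boolean toggling the model priors (cf. `up = True` and
# usepriors=up in Example #3).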
dist0 = MPMDist(trn_data0,
                kmax=1,
                priorkappa=90,
                lammove=0.01,
                mumove=0.18,
                d=10.0,
                usepriors=up)
dist1 = MPMDist(trn_data1,
                kmax=1,
                priorkappa=90,
                lammove=0.01,
                mumove=0.18,
                d=10.0,
                usepriors=up)
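# d fixes a scale parameter for each class's MPM; the commented lines at the
# top of Example #3 suggest it can instead be estimated from the training data
# (the 0.75 quantile there).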
mpm = MPMCls(dist0, dist1)
mhmc = mh.MHRun(mpm, burn=3000, thin=20)
mhmc.sample(iters, verbose=False)
errors['mpm'] = mpm.approx_error_data(mhmc.db, tst_data, tst_labels, numlam=40)
print("MPM Sampler error: %f" % errors['mpm'])

#p.close('all')

#def jitter(x):
#    rand = np.random.randn
#    return x + rand(*x.shape)*0.0

#def myplot(ax, g, data0, data1, gext):
#    ax.plot(data0[:, 0], data0[:, 1], 'g.', label='0', alpha=0.5)
#    ax.plot(data1[:, 0], data1[:, 1], 'r.', label='1', alpha=0.5)
#    ax.legend(fontsize=8, loc='best')
Example #3
# MPM Model
#d0 = np.asarray(mquantiles(trn_data0, 0.75, axis=1)).reshape(-1)
#d1 = np.asarray(mquantiles(trn_data1, 0.75, axis=1)).reshape(-1)
#dist0 = MPMDist(trn_data0,kmax=1,priorkappa=150,lammove=0.01,mumove=0.08,d=d0)
#dist1 = MPMDist(trn_data1,kmax=1,priorkappa=150,lammove=0.01,mumove=0.08,d=d1)

up = True
kappa = 10.0
S = np.eye(4) * 0.4 * (kappa - 1 - 4)
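# If kappa and S parameterize an inverse-Wishart prior on the covariance (as
# the GaussianBayes calls in the other examples suggest), then
# E[Sigma] = S / (kappa - 4 - 1), so this choice of S centers the prior
# covariance at 0.4 * I for the 4 features.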
dist0 = MPMDist(trn_data0, kmax=1, priorkappa=200, lammove=0.05, mumove=0.08,
                usepriors=up, kappa=kappa, S=S)
dist1 = MPMDist(trn_data1, kmax=1, priorkappa=200, lammove=0.05, mumove=0.08,
                usepriors=up, kappa=kappa, S=S)
mpm1 = MPMCls(dist0, dist1)
mhmc1 = mh.MHRun(mpm1, burn=2000, thin=50)
mhmc1.sample(iters, verbose=False)
errors['mpm'] = mpm1.approx_error_data(mhmc1.db, tst_data, tst_labels, numlam=numlam)
print("")
print("skLDA error: %f" % errors['lda'])
print("skKNN error: %f" % errors['knn'])
print("skSVM error: %f" % errors['svm'])
print("gauss error: %f" % errors['gauss'])
print("my MP error: %f" % errors['mpm'])

#n,gext,grid = get_grid_data(np.vstack(( trn_data0, trn_data1 )), positive=True)

#def myplot(ax, g, data0, data1, gext):
#    ax.plot(data0[:, 0], data0[:, 1], 'g.', label='0', alpha=0.5)
#    ax.plot(data1[:, 0], data1[:, 1], 'r.', label='1', alpha=0.5)
#    ax.legend(fontsize=8, loc='best')
Example #4
import pandas as pa

import samcnet.mh as mh
from samcnet.mixturepoisson import *

trn_data0 = pa.read_csv('tests/ex_data_0.csv', header=None)
trn_data1 = pa.read_csv('tests/ex_data_1.csv', header=None)
predict_samples = pa.read_csv('tests/ex_data_predict.csv', header=None)
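# Each CSV holds one class's training samples, one observation per row
# (presumably count data, given the Poisson-mixture model); header=None keeps
# the first row as data. The third file holds unlabeled points to classify.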

dist0 = MPMDist(trn_data0)
dist1 = MPMDist(trn_data1)
mpm = MPMCls(dist0, dist1)
# MH run: 1000 burn-in iterations, thinning of 50, with progress output.
mhmc = mh.MHRun(mpm, burn=1000, thin=50, verbose=True)
mhmc.sample(1e4)

print(mpm.predict(mhmc.db, predict_samples))
mhmc.db.close()
Example #5
bayes0 = GaussianBayes(np.zeros(num_feat), 1, kappa,
                       np.eye(num_feat) * (kappa - 1 - num_feat),
                       norm_trn_data0)
bayes1 = GaussianBayes(np.zeros(num_feat), 1, kappa,
                       np.eye(num_feat) * (kappa - 1 - num_feat),
                       norm_trn_data1)

# Gaussian Analytic
gc = GaussianCls(bayes0, bayes1)
errors['gauss'] = gc.approx_error_data(norm_tst_data, tst_labels)
print("Gaussian Analytic error: %f" % errors['gauss'])

# MPM Model
dist0 = MPMDist(trn_data0, kmax=1, priorkappa=150, lammove=0.01, mumove=0.08)
dist1 = MPMDist(trn_data1, kmax=1, priorkappa=150, lammove=0.01, mumove=0.08)
mpm = MPMCls(dist0, dist1)
mhmc = mh.MHRun(mpm, burn=1000, thin=50)
mhmc.sample(iters, verbose=False)
errors['mpm'] = mpm.approx_error_data(mhmc.db, tst_data, tst_labels, numlam=50)
print("MPM Sampler error: %f" % errors['mpm'])

output['acceptance'] = float(mhmc.accept_loc) / mhmc.total_loc
mhmc.clean_db()
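# clean_db presumably removes the sampler's trace database before the second
# model below is fit (assumption based on the method name).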

# MPM Model
priorsigma = np.ones(4) * 0.1
pm0 = np.ones(4) * mu0
pm1 = np.ones(4) * mu1
ud = True
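# Hyperparameters for a second MPM fit: per-feature prior means built from
# mu0/mu1 and a common prior sigma of 0.1; ud is presumably a boolean flag
# analogous to `up`/usepriors in Examples #2 and #3.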
dist0 = MPMDist(
    trn_data0,
    kmax=1,
Example #6
########################################
########################################
########################################
########################################
########################################
# MPM Model
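# rawdata.loc[sel['trn0'], sel['feats']] and rawdata.loc[sel['trn1'], sel['feats']]
# select each class's training rows restricted to the chosen feature columns;
# sel also carries the matching test labels (sel['tstl']) used below.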
dist0 = MPMDist(rawdata.loc[sel['trn0'], sel['feats']],
                priorkappa=priorkappa,
                lammove=lammove,
                mumove=mumove)
dist1 = MPMDist(rawdata.loc[sel['trn1'], sel['feats']],
                priorkappa=priorkappa,
                lammove=lammove,
                mumove=mumove)
mpm = MPMCls(dist0, dist1)
mhmc = mh.MHRun(mpm, burn=burn, thin=thin)
mhmc.sample(iters, verbose=False)
errors['mpm'] = mpm.approx_error_data(mhmc.db,
                                      tst_data,
                                      sel['tstl'],
                                      numlam=numlam)
print("MPM Sampler error: %f" % errors['mpm'])

output['acceptance'] = float(mhmc.accept_loc) / mhmc.total_loc
########################################
########################################
########################################
########################################
########################################
# Calibrated MPM Model
p0, p1 = calibrate(rawdata, sel, params)