Example No. 1
def normal_mild_corr(N):
    X = metropolis_hastings(log_normal,
                            chain_size=N,
                            thinning=1,
                            x_prev=np.random.randn(),
                            step=0.55)
    return X
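For orientation, here is a minimal sketch of what a random-walk `metropolis_hastings` with this call signature (log density, `chain_size`, `thinning`, `x_prev`, `step`) could look like. It is an illustrative assumption, not the repository's actual implementation.

import numpy as np


def metropolis_hastings_sketch(log_density, chain_size, thinning=1, x_prev=None, step=0.5):
    # Random-walk Metropolis-Hastings: propose x' = x + step * N(0, I) and accept
    # with probability min(1, p(x') / p(x)); keep every `thinning`-th state.
    x = np.random.randn() if x_prev is None else x_prev
    log_p = log_density(x)
    samples = []
    while len(samples) < chain_size:
        for _ in range(thinning):
            proposal = x + step * np.random.randn(*np.shape(x))
            log_p_prop = log_density(proposal)
            if np.log(np.random.rand()) < log_p_prop - log_p:
                x, log_p = proposal, log_p_prop
        samples.append(x)
    return np.array(samples)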
Example No. 2
def sample_sgld_t_student(N, degree_of_freedom, epsilon):
    grd_log = grad_log_t_df(degree_of_freedom)
    X = metropolis_hastings(grd_log,
                            chain_size=N,
                            thinning=1,
                            x_prev=np.random.randn(),
                            step=0.50)
    return X
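A plausible definition of the `grad_log_t_df` factory, assuming it returns the score function (derivative of the log density) of a Student-t with the given degrees of freedom; the exact form in the repository may differ.

import numpy as np


def grad_log_t_df_sketch(df):
    # Score of a Student-t with `df` degrees of freedom:
    # log p(x) = -(df + 1) / 2 * log(1 + x**2 / df) + const,
    # so d/dx log p(x) = -(df + 1) * x / (df + x**2).
    def grad_log(x):
        return -(df + 1.0) * x / (df + x ** 2)
    return grad_log

Note that the snippet above passes this gradient where `metropolis_hastings` expects a log density and never uses `epsilon`; both quirks come from the original excerpt and are left as-is.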
Example No. 3
def gen(N, df, thinning=1):
    log_den = log_normal
    if df < np.inf:
        log_den = grad_log_t_df(df)

    return metropolis_hastings(log_den,
                               chain_size=N,
                               thinning=thinning,
                               x_prev=np.random.randn(),
                               step=0.5)
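A quick usage sketch for `gen` (illustrative only): `df = np.inf` keeps the standard-normal target, while a finite `df` switches to the Student-t score returned by `grad_log_t_df`.

X_gauss = gen(2000, np.inf)           # standard normal target, no thinning
X_t5 = gen(2000, 5, thinning=10)      # Student-t(5) target, keep every 10th draw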
Example No. 4
from sgld_test.likelihoods import gen_X, log_probability
from sampplers.MetropolisHastings import metropolis_hastings
import numpy as np
from time import time

# SEED, SAMPLE_SIZE, CHAIN_SIZE, NUMBER_OF_TESTS and NO_OF_SAMPELS_IN_TEST are
# module-level constants defined elsewhere in the original script.


np.random.seed(SEED)
X = gen_X(SAMPLE_SIZE)


def vectorized_log_density(theta):
    return log_probability(theta, X)


t1 = time()


sample = []
no_chains = NUMBER_OF_TESTS * NO_OF_SAMPELS_IN_TEST
for i in range(no_chains):
    if i % 100 == 0:
        print(i * 100.0 / no_chains)
        print(time() - t1)
    sample.append(
        metropolis_hastings(vectorized_log_density, chain_size=CHAIN_SIZE, thinning=1, x_prev=np.random.randn(2))
    )

sample = np.array(sample)


np.save("samples.npy", sample)
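Once the loop finishes, the saved array can be read back and grouped per test. A sketch, assuming each chain is returned as an array of shape (CHAIN_SIZE, 2):

samples = np.load("samples.npy")      # shape (no_chains, CHAIN_SIZE, 2) under that assumption
per_test = samples.reshape(NUMBER_OF_TESTS, NO_OF_SAMPELS_IN_TEST, -1, 2)
print(samples.mean(axis=1))           # per-chain means of the two parameters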
Example No. 5
    return log_normal


def grad_log_dens(x):
    return -x


arr = np.empty((0, 2))

arr2 = np.empty((0, 2))
for c in [1.0, 1.3, 2.0, 3.0]:
    print('c', c)

    log_normal = logg(c)

    for i in range(23):
        print(i)
        x = metropolis_hastings(log_normal,
                                chain_size=500,
                                thinning=15,
                                x_prev=np.random.randn(2))

        me = GaussianQuadraticTest(grad_log_dens)
        qm = QuadraticMultiple(me)
        qm2 = QuadraticMultiple2(me)

        accept_null, p_val = qm.is_from_null(0.05, x, 0.1)
        p_val2 = qm2.is_from_null(0.05, x, 0.1)
        print(p_val2)
        arr = np.vstack((arr, np.array([c, min(p_val)])))
        arr2 = np.vstack((arr2, np.array([c, p_val2])))


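The excerpt above begins mid-function: the leading `return log_normal` is the tail of a factory `logg(c)` whose body is not shown. A plausible reconstruction, assuming `c` scales the variance of an isotropic Gaussian target (the exact parametrisation in the original may differ):

import numpy as np


def logg_sketch(c):
    # Log-density of a zero-mean isotropic Gaussian with variance scaled by c;
    # c = 1.0 recovers the standard normal whose score is grad_log_dens(x) = -x.
    def log_normal(x):
        return -np.dot(x, x) / (2.0 * c)
    return log_normal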
Example No. 6
def grad_log_dens(x):
    return -x


arr = np.empty((0, 2))

arr2 = np.empty((0, 2))
for c in [1.0, 1.3, 2.0, 3.0]:
    print('c', c)

    log_normal = logg(c)

    for i in range(23):
        print(i)
        x = metropolis_hastings(log_normal,
                                chain_size=500,
                                thinning=15,
                                x_prev=np.random.randn(2))

        me = GaussianQuadraticTest(grad_log_dens)
        qm = QuadraticMultiple(me)
        qm2 = QuadraticMultiple2(me)

        accept_null, p_val = qm.is_from_null(0.05, x, 0.1)
        p_val2 = qm2.is_from_null(0.05, x, 0.1)
        print(p_val2)
        arr = np.vstack((arr, np.array([c, min(p_val)])))
        arr2 = np.vstack((arr2, np.array([c, p_val2])))

df = DataFrame(arr)
seaborn.boxplot(x=0, y=1, data=df)
# `seaborn.plt` was removed in recent seaborn releases; call matplotlib.pyplot directly.
import matplotlib.pyplot as plt
plt.show()
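As a numeric complement to the boxplot, the rejection rate at the 5% level can be read directly from `arr` (same names as above):

for c in [1.0, 1.3, 2.0, 3.0]:
    p_vals_for_c = arr[arr[:, 0] == c, 1]
    print(c, np.mean(p_vals_for_c < 0.05))   # fraction of the 23 runs rejected at alpha = 0.05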
Example No. 7
__author__ = 'kcx'
import numpy as np
from sampplers.MetropolisHastings import metropolis_hastings
# GaussianSteinTest comes from the repository's test code; its import is not shown in this excerpt.


def log_normal(x):
    return -np.dot(x, x) / 2


thining_jump = 20
chain_size = 10000
results = np.zeros((thining_jump, 3))

for thining in range(1, thining_jump, 2):
    print('thining ', thining)
    pval = []

    for i in range(1000):
        x = metropolis_hastings(log_normal,
                                chain_size=chain_size,
                                thinning=thining)

        me = GaussianSteinTest(x, log_normal)

        pval.append(me.compute_pvalue())

    res = np.percentile(pval, [5, 10, 15]) * 100.0
    results[thining] = res

print(results)

np.save('temp_quantiles.npy', results)
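A small check on the saved quantiles: under the null the p-values should be roughly uniform, so the 5th/10th/15th percentiles (scaled by 100 above) should settle near 5, 10 and 15 once thinning has removed most of the chain's autocorrelation.

quantiles = np.load('temp_quantiles.npy')
for t in range(1, thining_jump, 2):
    print(t, quantiles[t])   # each row should approach [5, 10, 15]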
Example No. 8
def gen(N, df, thinning=1):
    log_den = log_normal
    if df < np.inf:
        log_den = grad_log_t_df(df)

    return metropolis_hastings(log_den, chain_size=N, thinning=thinning, x_prev=np.random.randn(), step=0.5)
Example No. 9
def normal_mild_corr(N):
    X = metropolis_hastings(log_normal, chain_size=N, thinning=1,
                            x_prev=np.random.randn(), step=0.55)
    return X
Example No. 10
def sample_sgld_t_student(N, degree_of_freedom, epsilon):
    grd_log = grad_log_t_df(degree_of_freedom)
    X = metropolis_hastings(grd_log, chain_size=N, thinning=1,
                            x_prev=np.random.randn(), step=0.50)
    return X
Example No. 11
__author__ = 'kcx'
import numpy as np
from sampplers.MetropolisHastings import metropolis_hastings



def log_normal(x):
    return -np.dot(x, x) / 2


thining_jump = 20
chain_size = 10000
results = np.zeros((thining_jump, 3))

for thining in range(1, thining_jump, 2):
    print('thining ', thining)
    pval = []

    for i in range(1000):
        x = metropolis_hastings(log_normal, chain_size=chain_size, thinning=thining)

        me = GaussianSteinTest(x, log_normal)

        pval.append(me.compute_pvalue())

    res = np.percentile(pval, [5, 10, 15]) * 100.0
    results[thining] = res

print(results)

np.save('temp_quantiles.npy', results)
Example No. 12
from sgld_test.likelihoods import gen_X, log_probability

from sampplers.MetropolisHastings import metropolis_hastings
import numpy as np
from time import time

np.random.seed(SEED)
X = gen_X(SAMPLE_SIZE)


def vectorized_log_density(theta):
    return log_probability(theta, X)


t1 = time()

sample = []
no_chains = NUMBER_OF_TESTS * NO_OF_SAMPELS_IN_TEST
for i in range(no_chains):
    if i % 100 == 0:
        print(i * 100.0 / no_chains)
        print(time() - t1)
    sample.append(
        metropolis_hastings(vectorized_log_density,
                            chain_size=CHAIN_SIZE,
                            thinning=1,
                            x_prev=np.random.randn(2)))

sample = np.array(sample)

np.save('samples.npy', sample)