import numpy, pylab
import theano
from theano import tensor
from rstreams import RandomStreams
import distributions
from sample import mh2_sample
from rv import full_log_likelihood

s_rng = RandomStreams(3424)

# Two-component Gaussian mixture: mixing weight p, component means m1 and m2,
# and a shared noise parameter v, all with flat priors.
p = s_rng.dirichlet(numpy.asarray([1, 1]))[0]
m1 = s_rng.uniform(low=-5, high=5)
m2 = s_rng.uniform(low=-5, high=5)
v = s_rng.uniform(low=0, high=1)

# Per-observation component assignments C pick a mean for each of the four
# data points; D is the observation model.
C = s_rng.binomial(1, p, draw_shape=(4,))
m = tensor.switch(C, m1, m2)
D = s_rng.normal(m, v, draw_shape=(4,))

D_data = numpy.asarray([1, 1.2, 3, 3.4], dtype=theano.config.floatX)

# Condition on the observed data and sample the posterior over the model
# parameters (200 samples after 1000 burn-in steps, keeping every 100th).
givens = {D: D_data}
sampler = mh2_sample(s_rng, [p, m1, m2, v], givens)

samples = sampler(200, 1000, 100)
print samples[0].mean(), samples[1].mean(), samples[2].mean(), samples[3].mean()
import numpy, pylab
import theano
from theano import tensor
from rstreams import RandomStreams
import distributions
from sample import mh2_sample
from for_theano import evaluate
from rv import full_log_likelihood

s_rng = RandomStreams(23424)

# Prior probability that the coin is fair; with probability 1 - fair_prior
# the coin's weight instead gets a flat prior on [0, 1].
fair_prior = 0.999

coin_weight = tensor.switch(
    s_rng.binomial(1, fair_prior) > 0.5, 0.5,
    s_rng.dirichlet([1, 1])[0])

# A coin with weight p flipped `size` times; `coin` ties the flips to the
# latent coin_weight defined above.
make_coin = lambda p, size: s_rng.binomial(1, p, draw_shape=(size,))
coin = lambda size: make_coin(coin_weight, size)

# Draw increasingly large datasets from a biased coin (weight 0.9), condition
# on the observed flips, and estimate the posterior mean of coin_weight.
for size in [1, 3, 6, 10, 20, 30, 50, 70, 100]:
    data = evaluate(make_coin(0.9, size))

    sampler = mh2_sample(s_rng, [coin_weight], {coin(size): data})

    print "nr of examples", size, ", estimated probability", sampler(
        nr_samples=400, burnin=20000, lag=10)[0].mean()
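
# Bayesian polynomial regression. The imports and random stream below mirror
# the other examples in this listing; the hyperparameters (polynomial order m,
# weight-prior scale beta, observation noise alpha) and the poly_expansion
# helper are not shown anywhere in the listing, so the values and the minimal
# basis expansion sketched here are assumptions made so the example runs end
# to end.
import numpy, pylab
import theano
from theano import tensor
from rstreams import RandomStreams
import distributions
from sample import mh2_sample
from rv import full_log_likelihood

s_rng = RandomStreams(3424)

m = 5        # assumed polynomial order
beta = 10.0  # assumed scale of the Gaussian prior on the weights
alpha = 0.1  # assumed observation noise

# Assumed helper: expand each input into the powers [1, x, x^2, ..., x^m]
def poly_expansion(x_in, order):
    return tensor.concatenate([x_in ** i for i in range(order + 1)], axis=1)

# Prior over the regression weights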
w = s_rng.normal(0, beta, draw_shape=(m+1,))

# Input variable used for training
x = tensor.matrix('x')
# Input variable used for testing
xn = tensor.matrix('xn')

# Regression function: linear in the weights w, applied to a polynomial
# expansion of the inputs
y = lambda x_in: tensor.dot(poly_expansion(x_in, m), w)

# Observation model: noisy targets at the training inputs
t = s_rng.normal(y(x), alpha, draw_shape=(10,))

# Generate some noisy training data (sine + noise); ten inputs to match the
# observation model's draw_shape=(10,)
X_data = numpy.arange(-1, 1, 0.2)
Y_data = numpy.sin(numpy.pi * X_data) + 0.1 * numpy.random.randn(*X_data.shape)
X_data.shape = (X_data.shape[0], 1)

# Test inputs at which to evaluate the posterior predictive
X_new = numpy.arange(-1, 1, 0.05)
X_new.shape = (X_new.shape[0], 1)

pylab.plot(X_data, Y_data, 'x', markersize=10)

# Sample the posterior predictive y(xn) at the test inputs, conditioned on
# the observed targets (50 samples, 1000 burn-in steps, keeping every 200th)
sampler = mh2_sample(s_rng, [y(xn)],
                     observations={t: Y_data},
                     givens={x: X_data, xn: X_new})
samples = sampler(50, 1000, 200)

# Predictive mean with one-standard-deviation error bars
pylab.errorbar(X_new, numpy.mean(samples[0].T, axis=1),
               numpy.std(samples[0].T, axis=1))
pylab.show()

# Individual sampled predictive functions
pylab.plot(X_new, samples[0].T)
pylab.show()
import numpy, pylab
import theano
from theano import tensor
from rstreams import RandomStreams
import distributions
from sample import mh2_sample
from rv import full_log_likelihood

s_rng = RandomStreams(23424)

# Observed marble draws (one-hot colour indicators): six draws each from
# bags 1-3, which each contain a single colour, and one draw from bag 4
marbles_bag_1 = numpy.asarray([[1,1,1,1,1,1],
                               [0,0,0,0,0,0],
                               [0,0,0,0,0,0],
                               [0,0,0,0,0,0],
                               [0,0,0,0,0,0]], dtype=theano.config.floatX).T
marbles_bag_2 = numpy.asarray([[0,0,0,0,0,0],
                               [1,1,1,1,1,1],
                               [0,0,0,0,0,0],
                               [0,0,0,0,0,0],
                               [0,0,0,0,0,0]], dtype=theano.config.floatX).T
marbles_bag_3 = numpy.asarray([[0,0,0,0,0,0],
                               [0,0,0,0,0,0],
                               [0,0,0,0,0,0],
                               [1,1,1,1,1,1],
                               [0,0,0,0,0,0]], dtype=theano.config.floatX).T
marbles_bag_4 = numpy.asarray([[0],[0],[0],[0],[1]], dtype=theano.config.floatX).T


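# `memoized` is used by the model definition below but is not defined in this
# listing (it presumably ships with the accompanying utilities). A minimal
# stand-in is sketched here: it caches the result per argument, so that every
# call to bag_prototype(bag) with the same bag index refers to the same
# random variable rather than creating a new one.
def memoized(f):
    cache = {}
    def wrapper(*args):
        if args not in cache:
            cache[args] = f(*args)
        return cache[args]
    return wrapper
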
# Define flat model
bag_prototype =  memoized(lambda bag: s_rng.dirichlet(numpy.asarray([1, 1, 1, 1, 1])*5))
draw_marbles = lambda bag, nr: s_rng.multinomial(1, bag_prototype(bag), draw_shape=(nr,))

# Generate samples from the model
givens = {draw_marbles(1,6): marbles_bag_1,
          draw_marbles(2,6): marbles_bag_2,
          draw_marbles(3,6): marbles_bag_3,
          draw_marbles(4,1): marbles_bag_4}
            
sampler = mh2_sample(s_rng, [draw_marbles(4,1)], givens)

# 200 posterior samples (burn-in 100, lag 100) of a fresh draw from bag 4,
# conditioned on the observed bags
samples = sampler(200, 100, 100)
data = samples[0]

# Show histogram
pylab.subplot(211)
pylab.bar(range(5), data.sum(axis=0))
pylab.title("Flat model")



# Define hierarchical model
phi = s_rng.dirichlet(numpy.asarray([1, 1, 1, 1, 1]))
alpha = s_rng.gamma(2., 2.)        
prototype = phi*alpha
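
# The listing stops after defining `prototype`; the completion below is an
# assumed sketch that mirrors the flat model above. Each bag's colour
# distribution is now drawn from a Dirichlet whose concentration vector is the
# shared prototype phi*alpha (this assumes s_rng.dirichlet accepts a symbolic
# concentration argument), so the single observed draw from bag 4 can borrow
# strength from the heavily observed bags 1-3.
bag_prototype_h = memoized(lambda bag: s_rng.dirichlet(prototype))
draw_marbles_h = lambda bag, nr: s_rng.multinomial(1, bag_prototype_h(bag), draw_shape=(nr,))

# Condition on the same observations and sample a fresh draw from bag 4
givens_h = {draw_marbles_h(1,6): marbles_bag_1,
            draw_marbles_h(2,6): marbles_bag_2,
            draw_marbles_h(3,6): marbles_bag_3,
            draw_marbles_h(4,1): marbles_bag_4}

sampler_h = mh2_sample(s_rng, [draw_marbles_h(4,1)], givens_h)

samples_h = sampler_h(200, 100, 100)
data_h = samples_h[0]

# Show histogram
pylab.subplot(212)
pylab.bar(range(5), data_h.sum(axis=0))
pylab.title("Hierarchical model")
pylab.show()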