Example #1

import sys
sys.path.insert(0, 'demos/')

import numpy as np

from vuq import UniformND
from vuq import GammaPDF
from vuq import PDFCollection
from vuq import MultivariateNormal
from vuq import IsotropicGaussianLikelihood
from vuq import Joint

from diffusion import ContaminantTransportModelUpperLeft

# Number of dimensions
num_dim = 3

# The number of components to use for the mixture
num_comp = 1

#-------- The (hypothetical) joint distribution ----------------

# The prior
collection = [UniformND(1), UniformND(1), GammaPDF(1, 0.05, 1)]
prior = PDFCollection(collection)
# The data
data = np.load('data_concentrations_upperleft_corner.npy')
# The forward model
diff_model = ContaminantTransportModelUpperLeft()
print 'Num_input:'
print str(diff_model.num_input) + '\n'
# The isotropic Likelihood
IsotropicL = IsotropicGaussianLikelihood(data[:], diff_model)
# The joint
log_p = Joint(IsotropicL, prior)
print 'Target:'
print str(log_p)

# The approximating distribution
# Initialize num_comp components for the approximating mixture, with
# random Gamma-distributed means as in the original snippet
comp = [MultivariateNormal(np.random.gamma(10, 1, num_dim))
        for _ in xrange(num_comp)]
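# The snippet is cut off here. A hedged sketch of how the ELBO could be
# assembled from these pieces, reusing only the calls that appear in
# Example #3 below; how the comp list above was meant to seed the mixture
# is not shown in this snippet, so the create() classmethod is used instead:
from vuq import MixtureOfMultivariateNormals
from vuq import FirstOrderEntropyApproximation
from vuq import ThirdOrderExpectationFunctional
from vuq import EvidenceLowerBound

log_q = MixtureOfMultivariateNormals.create(log_p.num_dim, num_comp)
entropy = FirstOrderEntropyApproximation()
expectation_functional = ThirdOrderExpectationFunctional(log_p)
elbo = EvidenceLowerBound(entropy, expectation_functional)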
Example #2

import numpy as np

from vuq import PDFCollection
from vuq import GammaPDF
from vuq import UniformND

# The number of dimensions
num_input = 10

# The parameters for the (commented-out) Gamma alternative below
# Alpha is chosen uniformly from [0, 10]
alpha = 10 * np.random.rand(num_input)
# Beta is chosen uniformly from [0, 1]
beta = np.random.rand(num_input)

# Make a collection of num_input independent 1-D PDFs
# (swap in the commented line to test a collection of Gammas instead):
#log_p = PDFCollection([ GammaPDF(alpha[i], beta[i], 1) for i in xrange(num_input) ])
log_p = PDFCollection([UniformND(1) for i in xrange(num_input)])

print str(log_p)

print 'Evaluating at many points'
print '-' * 80
# Sample test points in [0, 1) so they fall inside the uniform support
# (use the Gamma sampler below when testing the Gamma collection):
#x = np.random.gamma(10, 1, [20, num_input])
x = np.random.rand(20, num_input)
y = log_p(x)
print 'Shape:', y.shape
print 'log pdf:'
print str(y) + '\n'
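# Sanity check: if UniformND(1) is the uniform density on [0, 1] (an
# assumption about the vuq API, not stated in this snippet), every log pdf
# entry equals log(1) = 0:
print 'All entries zero (expected for U[0, 1]):', np.allclose(y, 0.)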

# Test the evaluation of the gradient at many inputs simultaneously
print 'Evaluating gradient at many points'
print '-' * 80
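# The gradient-evaluation code is cut off in this example. A hypothetical
# call is sketched below; the method name grad_x is an assumption about
# the vuq API, not something shown in this snippet:
#dydx = log_p.grad_x(x)
#print 'Shape:', dydx.shape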
Example #3
from vuq import MultivariateNormal
from vuq import FlatPDF
from vuq import PDFCollection
from vuq import UncertaintyPropagationLikelihood
from vuq import Joint
from vuq import MixtureOfMultivariateNormals
from vuq import FirstOrderEntropyApproximation
from vuq import ThirdOrderExpectationFunctional
from vuq import EvidenceLowerBound
from vuq import FullOptimizer
from demos import TestModel1

# The number of components to use for the mixture
num_comp = 5

# The model
model = TestModel1()

# The prior
log_p_x = MultivariateNormal(mu=[0])
log_p_z_fake = FlatPDF(model.num_output)
log_p_x_ext = PDFCollection([log_p_x, log_p_z_fake])
# The isotropic Likelihood
log_p_z_given_x = UncertaintyPropagationLikelihood(model, alpha=1e-1)
# The joint
log_p = Joint(log_p_z_given_x, log_p_x_ext)

# The approximating distribution
log_q = MixtureOfMultivariateNormals.create(log_p.num_dim, num_comp)

# Build the ELBO
# Pick an entropy approximation
entropy = FirstOrderEntropyApproximation()
# Pick an approximation for the expectation of the joint
expectation_functional = ThirdOrderExpectationFunctional(log_p)
# Assemble the ELBO from the two approximations
elbo = EvidenceLowerBound(entropy, expectation_functional)
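# FullOptimizer is imported above but the example stops before using it.
# A hypothetical invocation is sketched below; the constructor argument
# and method name are assumptions about the vuq API, not shown here:
#optimizer = FullOptimizer(elbo)
#optimizer.optimize(log_q)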