def __init__(self, model, data=Data(), transform=tf.identity):
    # Point mass over the model's latent variables; models that do not
    # declare num_vars get a zero-variable point mass.
    if hasattr(model, 'num_vars'):
        num_vars = model.num_vars
    else:
        num_vars = 0

    variational = Variational()
    variational.add(PointMass(num_vars, transform))
    VariationalInference.__init__(self, model, variational, data)
def __init__(self, model, variational, data=Data()):
    Inference.__init__(self, model, data)
    self.variational = variational
def __init__(self, model, data=Data()):
    self.model = model
    self.data = data
    # Ensure a (global) TensorFlow session exists.
    get_session()
def ppc(model, variational=None, data=Data(), T=None, size=100,
        sess=tf.Session()):
    """Posterior predictive check.
    (Rubin, 1984; Meng, 1994; Gelman, Meng, and Stern, 1996)

    If variational is not specified, it defaults to a prior predictive
    check (Box, 1980).

    PPCs form an empirical distribution for the predictive discrepancy,

        p(T) = \int p(T(yrep) | z) p(z | y) dz,

    by drawing replicated data sets yrep and calculating T(yrep) for each
    data set. The reference distribution is then compared to the realized
    discrepancy T(y).

    Parameters
    ----------
    model : Model
        Class object with a 'sample_likelihood' method.
    variational : Variational, optional
        Latent variable distribution q(z) to sample from. It is an
        approximation to the posterior, e.g., a variational approximation
        or an empirical distribution from MCMC samples. If not specified,
        samples are obtained from the model via its 'sample_prior' method.
    data : Data, optional
        Observed data to compare to. If not specified, returns only the
        reference distribution with an assumed replicated data set size
        of 1.
    T : function, optional
        Discrepancy function written in TensorFlow. Default is the
        identity. It takes a data set y and optionally a set of latent
        variables z as input.
    size : int, optional
        Number of replicated data sets.
    sess : tf.Session, optional
        Session used during inference.

    Returns
    -------
    list
        List containing the reference distribution, which is a NumPy
        vector of size elements,
        (T(yrep^{1}, z^{1}), ..., T(yrep^{size}, z^{size}));
        and the realized discrepancy, which is a NumPy vector of size
        elements,
        (T(y, z^{1}), ..., T(y, z^{size})).
    """
    y = data.data
    if y is None:
        N = 1
    else:
        N = data.N

    if T is None:
        T = lambda y, z=None: y

    # 1. Sample from posterior (or prior).
    # We must fetch zs out of the session because sample_likelihood()
    # may require a SciPy-based sampler.
    if variational is not None:
        zs, samples = variational.sample(y, size=size)
        feed_dict = variational.np_sample(samples, size, sess=sess)
        zs = sess.run(zs, feed_dict)
    else:
        zs = model.sample_prior(size=size)
        zs = sess.run(zs)
        feed_dict = None  # no placeholders to feed in the prior case

    # 2. Sample from likelihood.
    yreps = model.sample_likelihood(zs, size=N)

    # 3. Calculate discrepancy.
    Tyreps = []
    Tys = []
    for yrep, z in zip(yreps, tf.unpack(zs)):
        Tyreps += [T(yrep, z)]
        if y is not None:
            Tys += [T(y, z)]

    if y is None:
        return sess.run(tf.pack(Tyreps), feed_dict)
    else:
        return sess.run([tf.pack(Tyreps), tf.pack(Tys)], feed_dict)
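# A minimal usage sketch of ppc (not from the source): ToyModel, its
# distributions, and the discrepancy T below are hypothetical stand-ins
# for a model exposing the 'sample_prior' and 'sample_likelihood' methods
# that ppc expects, written against the same TensorFlow 0.x API
# (tf.pack / tf.unpack) used above.

import numpy as np
import tensorflow as tf

class ToyModel:
    """Hypothetical model: z ~ N(0, 1), y | z ~ N(z, 1)."""
    def sample_prior(self, size=1):
        # One latent variable per replicated data set.
        return tf.random_normal([size, 1])

    def sample_likelihood(self, zs, size=1):
        # One replicated data set of `size` points per latent sample;
        # zs arrives as a NumPy array, fetched out of the session by ppc.
        return [np.random.normal(loc=z, scale=1.0, size=size)
                for z in zs]

# Discrepancy: the mean of a data set, ignoring z.
T = lambda y, z=None: np.mean(y)

# Prior predictive check: with no variational or data given, ppc returns
# only the reference distribution (T(yrep^{1}), ..., T(yrep^{10})).
Tyreps = ppc(ToyModel(), T=T, size=10)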
def __init__(self, model, data=Data()):
    self.model = model
    self.data = data
def __init__(self, model, data=Data(), transform=tf.identity):
    variational = PointMass(model.num_vars, transform)
    VariationalInference.__init__(self, model, variational, data)
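# Hypothetical construction sketch (not from the source): it assumes the
# constructors above belong to a MAP class wrapping VariationalInference,
# and a model that declares num_vars. Both the MAP name and the model's
# log_prob method are assumptions based on this section alone.

import tensorflow as tf

class GaussianModel:
    num_vars = 2  # number of latent variables

    def log_prob(self, xs, zs):
        # Placeholder joint log-density; a real model defines this in
        # TensorFlow using its data xs and latent samples zs.
        return -tf.reduce_sum(zs * zs, 1)

inference = MAP(GaussianModel())  # point mass over both latent variables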