class AutoRegressive(RandomVariable, Distribution):
  """A one-dimensional AR(1) process of length T.

  z[0] = eps[0],  z[t] = a * z[t-1] + eps[t],  eps[t] ~ Normal(0, sig).

  Args:
    T: number of time steps (int).
    a: autoregressive coefficient (scalar).
    sig: standard deviation of the per-step shocks (scalar).
  """

  def __init__(self, T, a, sig, *args, **kwargs):
    self.a = a      # autoregressive coefficient
    self.sig = sig  # shock standard deviation
    self.T = T      # number of time steps

    # Per-step innovations eps[t] ~ N(0, sig**2), one per time step.
    self.shocks = Normal(tf.zeros(T), scale=sig)
    # Unrolled process along the time axis: tf.scan with no initializer
    # uses the first element, so z[0] = eps[0], z[t] = a * z[t-1] + eps[t].
    self.z = tf.scan(lambda acc, x: self.a * acc + x, self.shocks)

    # Fill in Distribution defaults only when the caller did not set them.
    kwargs.setdefault('dtype', tf.float32)
    kwargs.setdefault('allow_nan_stats', False)
    kwargs.setdefault('reparameterization_type', FULLY_REPARAMETERIZED)
    kwargs.setdefault('validate_args', False)
    kwargs.setdefault('name', 'AutoRegressive')
    super(AutoRegressive, self).__init__(*args, **kwargs)
    self._args = (T, a, sig)

  def _log_prob(self, value):
    """Joint log-density of a length-T path `value`.

    Inverting the recursion, err[t] = value[t] - a * value[t-1]
    (with value[-1] taken as 0 via the zero-pad) recovers the shocks,
    which are iid Normal(0, sig); their log-densities sum to the joint.
    """
    err = value - self.a * tf.pad(value[:-1], [[1, 0]], 'CONSTANT')
    lpdf = self.shocks._log_prob(err)
    return tf.reduce_sum(lpdf)

  def _sample_n(self, n, seed=None):
    """Draw n independent AR(1) paths; returns a (n, T) tensor.

    Bug fix: shocks._sample_n(n, seed) has shape (n, T) and tf.scan
    iterates over the FIRST axis, so the original code applied the
    AR recursion across the n independent samples rather than across
    time. Transpose to (T, n) so the scan runs along the time axis,
    then transpose the result back to (n, T).
    """
    eps = self.shocks._sample_n(n, seed)  # shape (n, T)
    paths = tf.scan(lambda acc, x: self.a * acc + x,
                    tf.transpose(eps))    # scan over time -> (T, n)
    return tf.transpose(paths)            # back to (n, T)
import edward as ed
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import six
import tensorflow as tf
from edward.models import Categorical, Dirichlet, Empirical, InverseGamma, \
    MultivariateNormalDiag, Normal, ParamMixture

# Problem dimensions.
D = 3   # dimensionality of each normal draw
K = 10  # sample_shape: number of iid draws bundled into x

# D-dimensional standard normal, replicated K times via sample_shape.
x = Normal(tf.zeros(D), tf.ones(D), sample_shape=K)
print(x)

sess = ed.get_session()
# Draw 10 fresh realizations of x and print them.
# NOTE(review): _sample_n is a private API; presumably the result has
# shape (10, K, D) — confirm against the Edward RandomVariable docs.
print(sess.run(x._sample_n(10)))