Example No. 1
# Assumed imports for this snippet (PyMC3/Theano era):
import numpy as np
import theano
from pymc3.blocking import DictToArrayBijection


def _init_uw_global_shared(start, global_RVs, global_order):
    # keep only the start values that belong to global RVs
    start = {v.name: start[v.name] for v in global_RVs}
    bij = DictToArrayBijection(global_order, start)
    u_start = bij.map(start)            # flatten the start dict to a 1-d array
    w_start = np.zeros_like(u_start)    # second parameter vector starts at zero
    uw_start = np.concatenate([u_start, w_start])
    uw_global_shared = theano.shared(uw_start, 'uw_global_shared')

    return uw_global_shared, bij
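
The returned shared variable packs both variational parameter vectors into one flat array: the first half holds the flattened means from start, the second half is zero-initialized. A minimal sketch of splitting it back apart downstream, reusing uw_global_shared and bij from the function above (and assuming DictToArrayBijection.rmap, the inverse of map, as provided in this PyMC3 era):

uw = uw_global_shared                 # shape (2 * n,)
n = uw.shape[0] // 2
u, w = uw[:n], uw[n:]                 # symbolic halves of the packed vector

# rmap goes back from a flat array to a {name: value} point dict
flat = uw_global_shared.get_value()
point = bij.rmap(flat[: flat.shape[0] // 2])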
Example No. 2
    # assumes: import numpy as num; import theano.tensor as tt;
    # `utility` is the module under test (ListArrayOrdering, ListToArrayBijection)
    def test_list_ordering(self):
        a = num.random.rand(10).reshape((5, 2))
        b = num.random.rand(5).reshape((5, 1))
        c = num.random.rand(1).reshape((1, 1))
        ta = tt.matrix('a')
        tb = tt.matrix('b')
        tc = tt.matrix('c')
        ta.tag.test_value = a
        tb.tag.test_value = b
        tc.tag.test_value = c
        tvars = [ta, tb, tc]
        ordering = utility.ListArrayOrdering(tvars)
        with self.assertRaises(KeyError):
            ordering['b']

        lordering = utility.ListArrayOrdering(tvars, 'tensor')
        lordering['b'].slc

        for var in ordering:
            print(var)

        lpoint = [a, b, c]
        lij = utility.ListToArrayBijection(lordering, lpoint)

        ref_point = {'a': a, 'b': b, 'c': c}
        array = lij.l2a(lpoint)
        point = lij.l2d(lpoint)
        print('arr', array)
        # print('point, ref_point', point, ref_point)

        print(lij.l2d(lij.a2l(array)))

        ordering = ArrayOrdering(tvars)
        bij = DictToArrayBijection(ordering, point)
        array2 = bij.map(point)
        print('bija', array2)
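
The property this test is really exercising is that l2a and a2l are inverses. A condensed sketch of that round trip, reusing lordering, a, b, c, and num from the test above (utility is assumed to be the module under test):

lpoint = [a, b, c]
lij = utility.ListToArrayBijection(lordering, lpoint)

flat = lij.l2a(lpoint)                # list of arrays -> one flat vector
restored = lij.a2l(flat)              # flat vector -> list of arrays
assert all(num.allclose(x, y) for x, y in zip(lpoint, restored))

# the dict view is keyed by the tensor names recorded in lordering
assert set(lij.l2d(lpoint).keys()) == {'a', 'b', 'c'}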
Example No. 3
    def _setup(self, **kwargs):
        self._histogram_order = ArrayOrdering(self.global_vars)
        self._bij = DictToArrayBijection(self._histogram_order, dict())
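
The empty dict passed to DictToArrayBijection appears to be a placeholder: map only needs the ordering, so _bij.map(point) can flatten any point dict whose keys match the ordered globals. A small sketch under that assumption, with ArrayOrdering and DictToArrayBijection taken from pymc3.blocking (variable names are illustrative):

import numpy as np

order = ArrayOrdering(global_vars)          # fixes a flat layout over the RVs
bij = DictToArrayBijection(order, dict())   # placeholder dict, as in _setup

point = {'mu': np.zeros(3), 'sigma': np.ones(1)}  # hypothetical global RVs
flat = bij.map(point)                       # values concatenated in order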
Example No. 4
# Context assumed from the PyMC3 variational module: np/theano/tt/pm aliases,
# ArrayOrdering, DictToArrayBijection, tt_rng, memoize, Approximation.
class Histogram(Approximation):
    """
    Builds an Approximation instance from a given trace;
    it has the same interface as a variational approximation.

    Parameters
    ----------
    trace : MultiTrace
    local_rv : dict
        Experimental for Histogram
        mapping {model_variable -> local_variable}
        Local Vars are used for Autoencoding Variational Bayes
        See (AEVB; Kingma and Welling, 2014) for details

    model : PyMC3 model

    Usage
    -----
    >>> with model:
    ...     step = NUTS()
    ...     trace = sample(1000, step=step)
    ...     histogram = Histogram(trace[100:])
    """
    def __init__(self, trace, local_rv=None, model=None):
        super(Histogram, self).__init__(local_rv=local_rv,
                                        model=model,
                                        trace=trace)

    def check_model(self, model, **kwargs):
        trace = kwargs.get('trace')
        if (trace is not None and
                not all([var.name in trace.varnames
                         for var in model.free_RVs])):
            raise ValueError('trace does not contain all free RVs')

    def _setup(self, **kwargs):
        self._histogram_order = ArrayOrdering(self.global_vars)
        self._bij = DictToArrayBijection(self._histogram_order, dict())

    def create_shared_params(self, **kwargs):
        trace = kwargs.get('trace')
        if trace is None:
            histogram = np.atleast_2d(self._bij.map(self.model.test_point))
        else:
            histogram = np.empty((len(trace), self.global_size))
            for i in range(len(trace)):
                histogram[i] = self._bij.map(trace[i])
        return theano.shared(pm.floatX(histogram), 'histogram')

    def randidx(self, size=None):
        if size is None:
            size = ()
        elif isinstance(size, tt.TensorVariable):
            if size.ndim < 1:
                size = size[None]
            elif size.ndim > 1:
                raise ValueError('size ndim should be no more than 1d')
        else:
            size = tuple(np.atleast_1d(size))
        return (tt_rng().uniform(size=size,
                                 low=0.0,
                                 high=self.histogram.shape[0] -
                                 1e-16).astype('int64'))

    def random_global(self, size=None, no_rand=False):
        theano_condition_is_here = isinstance(no_rand, tt.Variable)
        if theano_condition_is_here:
            return tt.switch(no_rand, self.mean,
                             self.histogram[self.randidx(size)])
        else:
            if no_rand:
                return self.mean
            else:
                return self.histogram[self.randidx(size)]

    @property
    def histogram(self):
        """
        Shortcut to flattened Trace
        """
        return self.shared_params

    @property
    @memoize
    def histogram_logp(self):
        """
        Symbolic logp for every point in trace
        """
        node = self.to_flat_input(self.model.logpt)

        def mapping(z):
            return theano.clone(node, {self.input: z})

        x = self.histogram
        _histogram_logp, _ = theano.scan(mapping, x, n_steps=x.shape[0])
        return _histogram_logp

    @property
    def mean(self):
        return self.histogram.mean(0)

    @classmethod
    def from_noise(cls,
                   size,
                   jitter=.01,
                   local_rv=None,
                   start=None,
                   model=None):
        """
        Initialize Histogram with random noise

        Parameters
        ----------
        size : int
            number of initial particles
        jitter : float
            initial sd of the noise around the start point
        local_rv : dict
            mapping {model_variable -> local_variable}
            Local Vars are used for Autoencoding Variational Bayes
            See (AEVB; Kingma and Welling, 2014) for details
        start : dict
            initial point (defaults to the model test point)
        model : pm.Model
            PyMC3 Model

        Returns
        -------
        Histogram
        """
        hist = cls(None, local_rv=local_rv, model=model)
        if start is None:
            start = hist.model.test_point
        start = hist._bij.map(start)
        # Initialize particles
        x0 = np.tile(start, (size, 1))
        x0 += np.random.normal(0, jitter, x0.shape)
        hist.histogram.set_value(x0)
        return hist
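
Because from_noise is a classmethod, a Histogram can be bootstrapped without running a sampler at all. A minimal usage sketch with a toy model (the model and variable are illustrative):

import pymc3 as pm

with pm.Model() as model:
    mu = pm.Normal('mu', mu=0, sd=1)

# 200 particles jittered (sd=0.1) around the model test point
hist = Histogram.from_noise(200, jitter=0.1, model=model)

print(hist.histogram.get_value().shape)     # (200, global_size)
print(hist.mean.eval())                     # flat-space mean over particles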
Example No. 5
class Histogram(Approximation):
    """
    Builds an Approximation instance from a given trace;
    it has the same interface as a variational approximation.

    Parameters
    ----------
    trace : MultiTrace
    local_rv : dict
        Experimental for Histogram
        mapping {model_variable -> local_variable}
        Local Vars are used for Autoencoding Variational Bayes
        See (AEVB; Kingma and Welling, 2014) for details

    model : PyMC3 model

    Usage
    -----
    >>> with model:
    ...     step = NUTS()
    ...     trace = sample(1000, step=step)
    ...     histogram = Histogram(trace[100:])
    """
    def __init__(self, trace, local_rv=None, model=None):
        self.trace = trace
        super(Histogram, self).__init__(local_rv=local_rv, model=model)

    def check_model(self, model):
        if not all([var.name in self.trace.varnames
                    for var in model.free_RVs]):
            raise ValueError('trace does not contain all free RVs')

    def _setup(self):
        self._histogram_order = ArrayOrdering(self.global_vars)
        self._bij = DictToArrayBijection(self._histogram_order, dict())

    def create_shared_params(self):
        trace = self.trace
        histogram = np.empty((len(trace), self.global_size))
        for i in range(len(trace)):
            histogram[i] = self._bij.map(trace[i])
        return theano.shared(histogram, 'histogram')

    def randidx(self, size=None):
        if size is None:
            size = ()
        elif isinstance(size, tt.TensorVariable):
            if size.ndim < 1:
                size = size[None]
            elif size.ndim > 1:
                raise ValueError('size ndim should be no more than 1d')
        else:
            size = tuple(np.atleast_1d(size))
        return (tt_rng()
                .uniform(size=size, low=0.0, high=self.histogram.shape[0] - 1e-16)
                .astype('int64'))

    def random_global(self, size=None, no_rand=False):
        theano_condition_is_here = isinstance(no_rand, tt.Variable)
        if theano_condition_is_here:
            return tt.switch(no_rand,
                             self.mean,
                             self.histogram[self.randidx(size)])
        else:
            if no_rand:
                return self.mean
            else:
                return self.histogram[self.randidx(size)]

    @property
    def histogram(self):
        """
        Shortcut to flattened Trace
        """
        return self.shared_params

    @property
    @memoize
    def histogram_logp(self):
        """
        Symbolic logp for every point in trace
        """
        node = self.to_flat_input(self.model.logpt)

        def mapping(z):
            return theano.clone(node, {self.input: z})
        x = self.histogram
        _histogram_logp, _ = theano.scan(
            mapping, x, n_steps=x.shape[0]
        )
        return _histogram_logp

    @property
    def mean(self):
        return self.histogram.mean(0)

    @property
    def params(self):
        return []
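
Compared with the previous example, this variant requires a trace (check_model dereferences self.trace directly) and reports no trainable params, since the stored particles themselves are the approximation. A sketch of evaluating the per-particle log probability, following the Usage docstring (model is assumed to be in scope):

with model:
    trace = pm.sample(1000)
    histogram = Histogram(trace[100:])

# one logp value per stored particle, computed by the scan in histogram_logp
logp_per_particle = histogram.histogram_logp.eval()
print(logp_per_particle.shape)              # (n_particles,)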