Example #1
    # Assumed module-level imports for this snippet:
    #   import aesara
    #   import aesara.tensor as aet
    #   import numpy as np
    #   from pymc.variational import opvi
    def add_param(self, user=None, name=None, ref=0.0, dtype="floatX"):
        """Create or wrap a flow parameter with the shape given by __param_spec__."""
        if dtype == "floatX":
            dtype = aesara.config.floatX
        # Shape spec entries are strings evaluated with the group dim bound to "d".
        spec = self.__param_spec__[name]
        shape = tuple(eval(s, {"d": self.dim}) for s in spec)
        if user is None:
            # No user-provided value: initialize the parameter ourselves.
            if self.local:
                raise opvi.LocalGroupError(
                    "Need parameters for local group flow")
            if self.batched:
                if self.batch_size is None:
                    raise opvi.BatchedGroupError(
                        "Need batch size to infer parameter shape")
                shape = (self.batch_size,) + shape
            # Jittered random initialization around the reference value.
            return aesara.shared(
                np.asarray(np.random.normal(size=shape) * self.__jitter +
                           ref).astype(dtype),
                name=name,
            )
        else:
            # User-provided value: reshape it to the expected (possibly batched)
            # shape; -1 lets the batch dimension be inferred when it is unknown.
            if self.batched:
                if self.local or self.batch_size is None:
                    shape = (-1,) + shape
                else:
                    shape = (self.batch_size,) + shape
            return aet.as_tensor(user).reshape(shape)
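
For context, the parameter shape above comes from evaluating each entry of __param_spec__ with the group's dimensionality bound to the name "d". A minimal standalone sketch of that mechanism follows; the spec tuple and dim value are made-up illustrations, not values from the library:

# Minimal sketch of the shape-spec evaluation used in add_param above.
# The spec tuple ("d", "d") and dim = 3 are hypothetical examples.
dim = 3
spec = ("d", "d")  # e.g. a square matrix parameter sized by the group dim
shape = tuple(eval(s, {"d": dim}) for s in spec)
print(shape)  # -> (3, 3)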
Example #2
    def __init__(self, trace=None, size=None, **kwargs):
        # Empirical approximations are built from a sampled trace and cannot
        # carry local (per-observation) variables, so reject them up front.
        if kwargs.get("local_rv", None) is not None:
            raise opvi.LocalGroupError(
                "Empirical approximation does not support local variables")
        super().__init__(trace=trace, size=size, **kwargs)
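
To show the guard in isolation, here is a self-contained sketch of the validate-then-delegate __init__ pattern used above; the class names are illustrative stand-ins, not PyMC's real classes:

# Self-contained sketch of the validate-then-delegate pattern; the names
# LocalGroupError, Base, and EmpiricalLike are hypothetical.
class LocalGroupError(TypeError):
    pass

class Base:
    def __init__(self, trace=None, size=None, **kwargs):
        self.trace, self.size = trace, size

class EmpiricalLike(Base):
    def __init__(self, trace=None, size=None, **kwargs):
        # Reject the unsupported option before any base-class setup runs.
        if kwargs.get("local_rv", None) is not None:
            raise LocalGroupError(
                "Empirical approximation does not support local variables")
        super().__init__(trace=trace, size=size, **kwargs)

EmpiricalLike(trace=[], size=10)              # fine
# EmpiricalLike(trace=[], local_rv={"x": 1})  # would raise LocalGroupError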