Example #1
    def __init__(
        self, rho, sigma=None, tau=None, constant=False, init=Flat.dist(), sd=None, *args, **kwargs
    ):
        super().__init__(*args, **kwargs)
        if sd is not None:
            sigma = sd  # `sd` is the deprecated alias for `sigma`

        tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
        self.sigma = self.sd = tt.as_tensor_variable(sigma)
        self.tau = tt.as_tensor_variable(tau)

        self.mean = tt.as_tensor_variable(0.0)

        # Infer the AR order p from the length/shape of rho.
        if isinstance(rho, list):
            p = len(rho)
        else:
            try:
                shape_ = rho.shape.tag.test_value
            except AttributeError:
                shape_ = rho.shape

            if hasattr(shape_, "size") and shape_.size == 0:
                p = 1  # scalar rho corresponds to an AR(1) process
            else:
                p = shape_[0]

        if constant:
            self.p = p - 1  # one entry of rho acts as the constant (intercept) term
        else:
            self.p = p

        self.constant = constant
        self.rho = rho = tt.as_tensor_variable(rho)
        self.init = init
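
This is the constructor of PyMC3's AR (autoregressive) distribution: the order p is inferred from rho, and tau/sigma are interchangeable scale parameterizations. A minimal usage sketch, assuming PyMC3 3.x with the Theano backend; the data and priors below are illustrative and not part of the original source:

import numpy as np
import pymc3 as pm

# Simulated AR(2) series, used only to give the model something to fit.
rng = np.random.default_rng(0)
y = np.zeros(200)
for t in range(2, 200):
    y[t] = 0.6 * y[t - 1] - 0.2 * y[t - 2] + rng.normal(scale=0.5)

with pm.Model():
    rho = pm.Normal("rho", mu=0.0, sigma=1.0, shape=2)  # AR coefficients; order p = 2
    sigma = pm.HalfNormal("sigma", sigma=1.0)           # innovation scale
    pm.AR("y", rho=rho, sigma=sigma, observed=y)
    trace = pm.sample(500, tune=500, cores=1)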
Example #2
    def __init__(self,
                 tau=None,
                 init=Flat.dist(),
                 sigma=None,
                 mu=0.0,
                 sd=None,
                 *args,
                 **kwargs):
        kwargs.setdefault("shape", 1)  # a random walk needs at least one step
        super().__init__(*args, **kwargs)
        if sum(self.shape) == 0:
            raise TypeError(
                "GaussianRandomWalk must be supplied a non-zero shape argument!"
            )
        if sd is not None:
            sigma = sd  # `sd` is the deprecated alias for `sigma`
            warnings.warn("sd is deprecated, use sigma instead",
                          DeprecationWarning)
        tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
        self.tau = tt.as_tensor_variable(tau)
        sigma = tt.as_tensor_variable(sigma)
        self.sigma = self.sd = sigma
        self.mu = tt.as_tensor_variable(mu)
        self.init = init
        self.mean = tt.as_tensor_variable(0.0)
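
The same tau/sigma handling appears in GaussianRandomWalk; the shape must sum to something non-zero or the constructor raises TypeError. A minimal usage sketch, assuming PyMC3 3.x; the shape and prior are illustrative:

import pymc3 as pm

with pm.Model():
    sigma = pm.HalfNormal("sigma", sigma=1.0)
    # A 100-step latent random walk. Passing shape=0 would trip the
    # "non-zero shape" check above; omitting shape falls back to 1 via setdefault.
    walk = pm.GaussianRandomWalk("walk", mu=0.0, sigma=sigma, shape=100)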
Example #3
    def __init__(self, w, mu, sigma=None, tau=None, sd=None, comp_shape=(), *args, **kwargs):
        if sd is not None:
            sigma = sd  # `sd` is the deprecated alias for `sigma`
        _, sigma = get_tau_sigma(tau=tau, sigma=sigma)  # the derived tau is discarded

        self.mu = mu = at.as_tensor_variable(mu)
        self.sigma = self.sd = sigma = at.as_tensor_variable(sigma)

        super().__init__(w, Normal.dist(mu, sigma=sigma, shape=comp_shape), *args, **kwargs)
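
This is the NormalMixture constructor (here the Aesara-based `at` variant): it wraps the weights w and per-component Normal.dist components in the generic Mixture. A minimal usage sketch, assuming a PyMC3/PyMC version that exposes NormalMixture with this signature; the data and priors are illustrative:

import numpy as np
import pymc3 as pm

# Two well-separated clusters, used only as example data.
data = np.concatenate([np.random.normal(-2.0, 0.5, 300),
                       np.random.normal(2.0, 0.5, 300)])

with pm.Model():
    w = pm.Dirichlet("w", a=np.ones(2))                  # mixture weights
    mu = pm.Normal("mu", mu=0.0, sigma=5.0, shape=2)     # component means
    sigma = pm.HalfNormal("sigma", sigma=1.0, shape=2)   # component scales
    pm.NormalMixture("obs", w=w, mu=mu, sigma=sigma, observed=data)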
Example #4
    def __init__(self, w, mu, sigma=None, tau=None, sd=None, comp_shape=(), *args, **kwargs):
        if sd is not None:
            sigma = sd
            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
        _, sigma = get_tau_sigma(tau=tau, sigma=sigma)

        self.mu = mu = tt.as_tensor_variable(mu)
        self.sigma = self.sd = sigma = tt.as_tensor_variable(sigma)

        super().__init__(w, Normal.dist(mu, sigma=sigma, shape=comp_shape), *args, **kwargs)
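
All of these constructors funnel the scale through get_tau_sigma, which lets callers specify either the precision tau or the standard deviation sigma; the two are related by tau = 1/sigma**2. An illustrative re-implementation of that relationship (a sketch only, not the library's get_tau_sigma helper):

import numpy as np

def tau_sigma(tau=None, sigma=None):
    # Illustrative only: mirrors the tau = sigma ** -2 relationship.
    if sigma is not None:
        sigma = np.asarray(sigma, dtype=float)
        return 1.0 / sigma ** 2, sigma
    if tau is not None:
        tau = np.asarray(tau, dtype=float)
        return tau, 1.0 / np.sqrt(tau)
    return 1.0, 1.0  # neither given: fall back to unit scale

print(tau_sigma(sigma=0.5))  # (array(4.), array(0.5))
print(tau_sigma(tau=4.0))    # (array(4.), array(0.5))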
Example #5
    def __init__(self,
                 tau=None,
                 init=Flat.dist(),
                 sigma=None,
                 mu=0.,
                 sd=None,
                 *args,
                 **kwargs):
        kwargs.setdefault('shape', 1)
        super().__init__(*args, **kwargs)
        if sum(self.shape) == 0:
            raise TypeError("RW2 must be supplied a non-zero shape argument!")
        if sd is not None:
            sigma = sd
        tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
        self.tau = tt.as_tensor_variable(tau)
        sigma = tt.as_tensor_variable(sigma)
        self.sigma = self.sd = sigma
        self.mu = tt.as_tensor_variable(mu)
        self.init = init
        self.mean = tt.as_tensor_variable(0.)
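
RW2 is not a PyMC3 built-in; it mirrors the GaussianRandomWalk constructor above, presumably for a second-order random walk. A standalone sketch of its shape-default-and-validation logic; check_shape is a hypothetical helper written for illustration and is not part of the original code:

def check_shape(**kwargs):
    # Hypothetical helper: reproduces the setdefault("shape", 1) fallback
    # and the "non-zero shape" guard used in the constructors above.
    kwargs.setdefault("shape", 1)
    shape = kwargs["shape"]
    shape = tuple(shape) if hasattr(shape, "__iter__") else (shape,)
    if sum(shape) == 0:
        raise TypeError("RW2 must be supplied a non-zero shape argument!")
    return shape

print(check_shape(shape=50))       # (50,)
print(check_shape())               # (1,) -- the setdefault fallback
print(check_shape(shape=(10, 3)))  # (10, 3)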