Example #1
    def test_convert_dims(self):
        assert convert_dims(dims="town") == ("town",)
        with pytest.raises(ValueError, match="must be a tuple, str or list"):
            convert_dims(3)
        with pytest.raises(ValueError, match="may only appear in the last position"):
            convert_dims(dims=(..., "town"))
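A minimal sketch of an implementation consistent with the test above (the real convert_dims lives in pymc.distributions.shape_utils and may differ in detail): strings become one-element tuples, non-sequence inputs are rejected, and Ellipsis may only appear in the last position.

def convert_dims(dims=None):
    """Normalize a dims argument to a tuple of dimension names, or None."""
    if dims is None:
        return None
    if isinstance(dims, str):
        dims = (dims,)
    elif isinstance(dims, (list, tuple)):
        dims = tuple(dims)
    else:
        raise ValueError(f"The `dims` parameter must be a tuple, str or list. Actual: {type(dims)}")
    if any(d is Ellipsis for d in dims[:-1]):
        raise ValueError(f"Ellipsis (...) may only appear in the last position of dims. Actual: {dims}")
    return dims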
Example #2
def _make_rv_and_resize_shape(
    *,
    cls,
    dims: Optional[Dims],
    model,
    observed,
    args,
    **kwargs,
) -> Tuple[Variable, Optional[WeakDims], Optional[Union[np.ndarray, Variable]], StrongShape]:
    """Creates the RV and processes dims or observed to determine a resize shape."""
    # Create the RV without dims information, because that's not something tracked at the Aesara level.
    # If necessary we'll later replicate to a different size implied by already known dims.
    rv_out = cls.dist(*args, **kwargs)
    ndim_actual = rv_out.ndim
    resize_shape = None

    # `dims` are only available with this API, because `.dist()` can be used
    # without a modelcontext and dims are not tracked at the Aesara level.
    dims = convert_dims(dims)
    dims_can_resize = kwargs.get("shape", None) is None and kwargs.get("size", None) is None
    if dims is not None:
        if dims_can_resize:
            resize_shape, dims = resize_from_dims(dims, ndim_actual, model)
        elif Ellipsis in dims:
            # Replace ... with None entries to match the actual dimensionality.
            dims = (*dims[:-1], *[None] * ndim_actual)[:ndim_actual]
    elif observed is not None:
        resize_shape, observed = resize_from_observed(observed, ndim_actual)
    return rv_out, dims, observed, resize_shape
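The caller is expected to act on the returned resize_shape; a rough usage sketch, assuming change_rv_size as it is used in Example #4 below:

rv_out, dims, observed, resize_shape = _make_rv_and_resize_shape(
    cls=cls, dims=dims, model=model, observed=observed, args=args, **kwargs
)
if resize_shape:
    # A batch size was specified through `dims`, or implied by `observed`.
    rv_out = change_rv_size(rv_var=rv_out, new_size=resize_shape, expand=True)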
Example #3
def get_steps(
    steps: Optional[Union[int, np.ndarray, TensorVariable]],
    *,
    shape: Optional[Shape] = None,
    dims: Optional[Dims] = None,
    observed: Optional[Any] = None,
    step_shape_offset: int = 0,
):
    """Extract number of steps from shape / dims / observed information

    Parameters
    ----------
    steps:
        User specified steps for timeseries distribution
    shape:
        User specified shape for timeseries distribution
    dims:
        User specified dims for timeseries distribution
    observed:
        User specified observed data for the timeseries distribution
    step_shape_offset:
        Difference between last shape dimension and number of steps in timeseries
        distribution, defaults to 0

    Returns
    -------
    steps
        Steps, if specified directly by user, or inferred from the last dimension of
        shape / dims / observed. When two sources of step information are provided,
        a symbolic Assert is added to ensure they are consistent.
    """
    inferred_steps = None
    if shape is not None:
        shape = to_tuple(shape)
        if shape[-1] is not ...:
            inferred_steps = shape[-1] - step_shape_offset

    if inferred_steps is None and dims is not None:
        dims = convert_dims(dims)
        if dims[-1] is not ...:
            model = modelcontext(None)
            inferred_steps = model.dim_lengths[dims[-1]] - step_shape_offset

    if inferred_steps is None and observed is not None:
        observed = convert_observed_data(observed)
        inferred_steps = observed.shape[-1] - step_shape_offset

    if inferred_steps is None:
        inferred_steps = steps
    # If there are two sources of information for the steps, assert they are consistent
    elif steps is not None:
        inferred_steps = Assert(msg="Steps do not match last shape dimension")(
            inferred_steps, at.eq(inferred_steps, steps)
        )
    return inferred_steps
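Illustrative calls, assuming get_steps is imported from pymc.distributions.shape_utils; an offset of 1 mimics a timeseries whose trailing shape entry is one larger than its number of steps:

# Steps inferred from the trailing shape entry minus the offset.
get_steps(steps=None, shape=(10, 100), step_shape_offset=1)  # returns 99
# With both sources given, the result is wrapped in a symbolic Assert that
# checks they agree when the graph is evaluated.
get_steps(steps=99, shape=(10, 100), step_shape_offset=1)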
Example #4
    def __new__(
        cls,
        name: str,
        *args,
        rng=None,
        dims: Optional[Dims] = None,
        initval=None,
        observed=None,
        total_size=None,
        transform=UNSET,
        **kwargs,
    ) -> RandomVariable:
        """Adds a RandomVariable corresponding to a PyMC distribution to the current model.

        Note that all remaining kwargs must be compatible with ``.dist()``

        Parameters
        ----------
        cls : type
            A PyMC distribution.
        name : str
            Name for the new model variable.
        rng : optional
            Random number generator to use with the RandomVariable.
        dims : tuple, optional
            A tuple of dimension names known to the model.
        initval : optional
            Numeric or symbolic untransformed initial value of matching shape,
            or one of the following initial value strategies: "moment", "prior".
            Depending on the sampler's settings, a random jitter may be added to numeric, symbolic
            or moment-based initial values in the transformed space.
        observed : optional
            Observed data to be passed when registering the random variable in the model.
            See ``Model.register_rv``.
        total_size : float, optional
            See ``Model.register_rv``.
        transform : optional
            See ``Model.register_rv``.
        **kwargs
            Keyword arguments that will be forwarded to ``.dist()``.
            Most prominently: ``shape`` and ``size``

        Returns
        -------
        rv : RandomVariable
            The created RV, registered in the Model.
        """

        try:
            from pymc.model import Model

            model = Model.get_context()
        except TypeError:
            raise TypeError("No model on context stack, which is needed to "
                            "instantiate distributions. Add variable inside "
                            "a 'with model:' block, or use the '.dist' syntax "
                            "for a standalone distribution.")

        if "testval" in kwargs:
            initval = kwargs.pop("testval")
            warnings.warn(
                "The `testval` argument is deprecated; use `initval`.",
                DeprecationWarning,
                stacklevel=2,
            )

        if not isinstance(name, string_types):
            raise TypeError(f"Name needs to be a string but got: {name}")

        if rng is None:
            rng = model.next_rng()

        if dims is not None and "shape" in kwargs:
            raise ValueError(
                f"Passing both `dims` ({dims}) and `shape` ({kwargs['shape']}) is not supported!"
            )
        if dims is not None and "size" in kwargs:
            raise ValueError(
                f"Passing both `dims` ({dims}) and `size` ({kwargs['size']}) is not supported!"
            )
        dims = convert_dims(dims)

        # Create the RV without dims information, because that's not something tracked at the Aesara level.
        # If necessary we'll later replicate to a different size implied by already known dims.
        rv_out = cls.dist(*args, rng=rng, **kwargs)
        ndim_actual = rv_out.ndim
        resize_shape = None

        # `dims` are only available with this API, because `.dist()` can be used
        # without a modelcontext and dims are not tracked at the Aesara level.
        if dims is not None:
            ndim_resize, resize_shape, dims = resize_from_dims(dims, ndim_actual, model)
        elif observed is not None:
            ndim_resize, resize_shape, observed = resize_from_observed(observed, ndim_actual)

        if resize_shape:
            # A batch size was specified through `dims`, or implied by `observed`.
            rv_out = change_rv_size(rv_var=rv_out, new_size=resize_shape, expand=True)

        rv_out = model.register_rv(
            rv_out,
            name,
            observed,
            total_size,
            dims=dims,
            transform=transform,
            initval=initval,
        )

        # add in pretty-printing support
        rv_out.str_repr = types.MethodType(str_for_dist, rv_out)
        rv_out._repr_latex_ = types.MethodType(
            functools.partial(str_for_dist, formatting="latex"), rv_out
        )

        rv_out.logp = _make_nice_attr_error("rv.logp(x)", "pm.logp(rv, x)")
        rv_out.logcdf = _make_nice_attr_error("rv.logcdf(x)", "pm.logcdf(rv, x)")
        rv_out.random = _make_nice_attr_error("rv.random()", "rv.eval()")
        return rv_out
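End to end, the dims branch above can be exercised through the public API; a hypothetical usage sketch, assuming pm.Model coords and pm.Normal behave as in recent PyMC releases:

import pymc as pm

with pm.Model(coords={"town": ["A", "B", "C"]}) as model:
    # dims="town" is resolved against the model's coordinates, so the RV is
    # resized to shape (3,) and registered under that dimension name.
    x = pm.Normal("x", mu=0.0, sigma=1.0, dims="town")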