Code Example #1
def pymc_objective(objective):
    """
    Creates a pymc3 model from an Objective. The NUTS sampler cannot be used
    because gradients of the Objective are not evaluable.

    Requires that theano and pymc3 be installed. This is an experimental
    feature.

    Parameters
    ----------
    objective : refnx.analysis.Objective

    Returns
    -------
    model : pymc3.Model

    Notes
    -----
    The varying parameters are renamed 'p0', 'p1', etc., because pymc3
    requires every parameter to have its own unique name.

    """
    import pymc3 as pm
    import theano.tensor as T
    from theano.compile.ops import as_op

    basic_model = pm.Model()

    wrapped_obj = _pymc_objective_wrapper(objective)

    pars = objective.varying_parameters()
    wrapped_pars = []
    with basic_model:
        # Priors for unknown model parameters
        for i, par in enumerate(pars):
            name = 'p%d' % i
            p = _to_pymc3_distribution(name, par)
            wrapped_pars.append(p)

        # Expected value of outcome
        try:
            v = wrapped_obj(*wrapped_pars)
        except Exception:
            # the objective can't be expressed directly with theano tensors,
            # so wrap the Python callable as a black-box theano op; autodiff
            # (and therefore NUTS) won't work on the wrapped function
            print("Falling back, theano autodiff won't work on function"
                  " object")
            o = as_op(itypes=[T.dscalar] * len(pars),
                      otypes=[T.dvector])(wrapped_obj)
            v = o(*wrapped_pars)

        # Likelihood (sampling distribution) of observations. The observed RV
        # is attached to ``basic_model`` by the model context, so it does not
        # need to be returned or truth-tested.
        pm.Normal('Y_obs',
                  mu=v,
                  sd=objective.data.y_err,
                  observed=objective.data.y)

    return basic_model
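
As a rough usage sketch (assumed workflow, not part of the refnx source), the returned model can be sampled with a gradient-free step method, since the as_op fallback above leaves NUTS without gradients. The `structure_model` and `data` names below are hypothetical placeholders for a refnx model and dataset built elsewhere.

import pymc3 as pm
from refnx.analysis import Objective

# `structure_model` and `data` are hypothetical placeholders
objective = Objective(structure_model, data)
basic_model = pymc_objective(objective)

with basic_model:
    # NUTS needs gradients the wrapped objective can't provide,
    # so use a gradient-free step method such as Slice or Metropolis
    step = pm.Slice()
    trace = pm.sample(2000, step=step, tune=1000)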
Code Example #2
File: objective.py  Project: llimeht/refnx
def _to_pymc3_distribution(name, par):
    """
    Create a pymc3 continuous distribution from a Bounds object.

    Parameters
    ----------
    name : str
        Name of parameter
    par : refnx.analysis.Parameter
        The parameter to wrap

    Returns
    -------
    d : pymc3.Distribution
        The pymc3 distribution

    """
    import numpy as np
    import pymc3 as pm
    from scipy import stats
    import theano.tensor as T
    from theano.compile.ops import as_op
    # Interval / PDF are the refnx bounds classes used below
    from refnx.analysis.bounds import Interval, PDF

    dist = par.bounds
    # interval and both lb, ub are finite
    if (isinstance(dist, Interval) and
            np.isfinite([dist.lb, dist.ub]).all()):
        return pm.Uniform(name, dist.lb, dist.ub)
    # no bounds: both lb and ub are infinite
    elif (isinstance(dist, Interval) and
          np.isneginf(dist.lb) and
          np.isinf(dist.ub)):
        return pm.Flat(name)
    # half-open interval, only the upper bound is finite
    elif isinstance(dist, Interval) and not np.isfinite(dist.lb):
        return dist.ub - pm.HalfFlat(name)
    # half-open interval, only the lower bound is finite
    elif isinstance(dist, Interval) and not np.isfinite(dist.ub):
        return dist.lb + pm.HalfFlat(name)

    # it's a PDF
    if isinstance(dist, PDF):
        dist_gen = getattr(dist.rv, 'dist', None)

        if isinstance(dist.rv, stats.rv_continuous):
            dist_gen = dist.rv

        if isinstance(dist_gen, type(stats.uniform)):
            if hasattr(dist.rv, 'args'):
                # scipy's uniform is parameterised by (loc, scale);
                # its support is [loc, loc + scale]
                p = pm.Uniform(name, dist.rv.args[0],
                               dist.rv.args[1] + dist.rv.args[0])
            else:
                p = pm.Uniform(name, 0, 1)
            return p

        # norm from scipy.stats; args are (loc, scale), i.e. (mu, sd)
        if isinstance(dist_gen, type(stats.norm)):
            if hasattr(dist.rv, 'args'):
                p = pm.Normal(name, mu=dist.rv.args[0], sd=dist.rv.args[1])
            else:
                p = pm.Normal(name, mu=0, sd=1)
            return p

    # not an Interval, uniform or normal, so fall back to a black-box
    # DensityDist built from the bound's logp and rvs methods
    d = as_op(itypes=[T.dscalar], otypes=[T.dscalar])(dist.logp)
    r = as_op(itypes=[T.dscalar], otypes=[T.dscalar])(dist.rvs)
    p = pm.DensityDist(name, d, random=r)

    return p
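
A minimal sketch of how _to_pymc3_distribution might be exercised (assumed usage, not from the refnx source): each parameter's bound is converted into a named pymc3 random variable inside a model context. The wrapping of a frozen scipy.stats distribution into a PDF bound is an assumption about the Parameter bounds setter.

import pymc3 as pm
from scipy import stats
from refnx.analysis import Parameter

# finite Interval bound -> pm.Uniform
thickness = Parameter(50.0, name='thickness', bounds=(1.0, 100.0))
# assumed: a frozen scipy.stats normal becomes a PDF bound -> pm.Normal
roughness = Parameter(3.0, name='roughness', bounds=stats.norm(3.0, 1.0))

with pm.Model():
    p0 = _to_pymc3_distribution('p0', thickness)
    p1 = _to_pymc3_distribution('p1', roughness)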