Example #1
    def __init__(self,
                 vars=None,
                 scaling=None,
                 step_scale=0.25,
                 is_cov=False,
                 model=None,
                 blocked=True,
                 potential=None,
                 dtype=None,
                 Emax=1000,
                 target_accept=0.8,
                 gamma=0.05,
                 k=0.75,
                 t0=10,
                 adapt_step_size=True,
                 step_rand=None,
                 **theano_kwargs):
        """Set up Hamiltonian samplers with common structures.

        Parameters
        ----------
        vars: list of theano variables
        scaling: array_like, ndim = {1,2}
            Scaling for momentum distribution. 1d arrays are interpreted as
            the matrix diagonal.
        step_scale: float, default=0.25
            Size of steps to take, automatically scaled down by 1/n**(1/4)
        is_cov: bool, default=False
            Treat scaling as a covariance matrix/vector if True, else treat
            it as a precision matrix/vector
        model: pymc3 Model instance
        blocked: bool, default=True
        potential: Potential, optional
            An object that represents the Hamiltonian with `velocity`,
            `energy`, and `random` methods.
        **theano_kwargs: passed to theano functions
        """
        self._model = modelcontext(model)

        if vars is None:
            vars = self._model.cont_vars
        vars = inputvars(vars)

        super().__init__(vars,
                         blocked=blocked,
                         model=model,
                         dtype=dtype,
                         **theano_kwargs)

        self.adapt_step_size = adapt_step_size
        self.Emax = Emax
        self.iter_count = 0
        size = self._logp_dlogp_func.size

        self.step_size = step_scale / (size**0.25)
        self.step_adapt = step_sizes.DualAverageAdaptation(
            self.step_size, target_accept, gamma, k, t0)
        self.target_accept = target_accept
        self.tune = True

        if scaling is None and potential is None:
            mean = floatX(np.zeros(size))
            var = floatX(np.ones(size))
            potential = QuadPotentialDiagAdapt(size, mean, var, 10)

        if isinstance(scaling, dict):
            point = Point(scaling, model=model)
            scaling = guess_scaling(point, model=model, vars=vars)

        if scaling is not None and potential is not None:
            raise ValueError("Can not specify both potential and scaling.")

        if potential is not None:
            self.potential = potential
        else:
            self.potential = quad_potential(scaling, is_cov)

        self.integrator = integration.CpuLeapfrogIntegrator(
            self.potential, self._logp_dlogp_func)

        self._step_rand = step_rand
        self._warnings = []
        self._samples_after_tune = 0
        self._num_divs_sample = 0
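
These keyword arguments are exposed through the concrete samplers such as `pm.NUTS` and `pm.HamiltonianMC`, which forward them to this base `__init__`. A minimal sketch of passing an explicit diagonal mass matrix via `scaling` (assuming a model whose free variables are all continuous, so `m.ndim` matches the mass-matrix size; the model and variable names are illustrative):

import numpy as np
import pymc3 as pm

with pm.Model() as m:
    x = pm.Normal("x", shape=5)
    # a 1d scaling array is the diagonal of the mass matrix;
    # is_cov=True reads it as a covariance rather than a precision diagonal
    step = pm.NUTS(scaling=np.ones(m.ndim), is_cov=True,
                   step_scale=0.25, target_accept=0.9)
    trace = pm.sample(500, step=step, tune=500)

The longer example below builds the `jitter+adapt_diag` initialization by hand, samples with tuning, and then reuses the adapted potential and the averaged step size to continue sampling without re-tuning:
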
import numpy as np
import pymc3 as pm
from pymc3.step_methods.hmc import quadpotential
from pymc3.step_methods import step_sizes

n_chains = 4

with pm.Model() as m:
    x = pm.Normal("x", shape=10)
    # replicate init='jitter+adapt_diag' by hand: jitter each chain's start
    # point by +/-1 around the test point
    start = []
    for _ in range(n_chains):
        mean = {var: val.copy() for var, val in m.test_point.items()}
        for val in mean.values():
            val[...] += 2 * np.random.rand(*val.shape) - 1
        start.append(mean)
    # adaptive diagonal mass matrix centred on the mean of the start points
    mean = np.mean([m.dict_to_array(vals) for vals in start], axis=0)
    var = np.ones_like(mean)
    potential = quadpotential.QuadPotentialDiagAdapt(m.ndim, mean, var, 10)
    step = pm.NUTS(potential=potential)
    trace1 = pm.sample(1000, step=step, start=start, tune=1000, cores=n_chains)

with m:  # need to be the same model
    # reuse the averaged step size from the tuned run and disable adaptation
    step_size = trace1.get_sampler_stats("step_size_bar")[-1]

    step.tune = False
    step.step_adapt = step_sizes.DualAverageAdaptation(
        step_size, step.target_accept, 0.05, 0.75, 10
    )
    trace2 = pm.sample(draws=100, step=step, tune=0, cores=n_chains)
    print(trace2[-1])
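
As a quick check (a sketch using only the sampler stats already read above), the step size reported in the continued run should match the tuned average from the first run, confirming that adaptation did not restart:

print(trace1.get_sampler_stats("step_size_bar")[-1])
print(trace2.get_sampler_stats("step_size")[:5])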