Example #1
def _create_sampler(model,
                    nprocs,
                    nchains=None,
                    betas=None,
                    swap_interval=1,
                    seed=None,
                    proposals=None,
                    set_start=True):
    """Creates a sampler."""
    if nchains is None:
        nchains = NCHAINS
    if betas is None:
        betas = BETAS
    ntemps = len(betas)
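    # only create a multiprocessing pool when more than one process is requested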
    if nprocs == 1:
        pool = None
    else:
        pool = multiprocessing.Pool(nprocs)
    sampler = ParallelTemperedSampler(model.params,
                                      model,
                                      nchains,
                                      betas=betas,
                                      seed=seed,
                                      swap_interval=swap_interval,
                                      proposals=proposals,
                                      pool=pool)
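    # if requested, draw starting positions from the prior, shaped (ntemps, nchains)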
    if set_start:
        sampler.start_position = model.prior_rvs(size=nchains * ntemps,
                                                 shape=(ntemps, nchains))
    return sampler
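
For orientation, here is a minimal usage sketch for the helper above, assuming it sits in the same module. The ToyModel class, its Gaussian log-likelihood, the NCHAINS/BETAS defaults, the epsie import path, and the sampler.run(...) call are illustrative assumptions about the epsie API and the surrounding module, not part of the original example.

import multiprocessing  # needed by _create_sampler when nprocs > 1

import numpy
from scipy import stats

# assumed import path for epsie's parallel-tempered sampler
from epsie.samplers import ParallelTemperedSampler

# assumed module-level defaults used by _create_sampler
NCHAINS = 4
BETAS = numpy.array([1.0, 0.5, 0.25])


class ToyModel:
    """Hypothetical model exposing the attributes the helper expects."""
    params = ['x']

    def prior_rvs(self, size=None, shape=None):
        # starting positions drawn from a standard-normal prior,
        # returned as a dict of arrays shaped (ntemps, nchains)
        return {'x': stats.norm.rvs(size=size).reshape(shape)}

    def __call__(self, **kwargs):
        # assumed model-callable signature: return (loglikelihood, logprior)
        x = kwargs['x']
        return stats.norm.logpdf(x), stats.norm.logpdf(x)


sampler = _create_sampler(ToyModel(), nprocs=1, seed=11)
sampler.run(100)  # assumed epsie call for advancing all chains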
Example #2
    def __init__(self, model, nchains, ntemps=None, betas=None,
                 proposals=None, default_proposal=None,
                 default_proposal_args=None, seed=None,
                 swap_interval=1,
                 checkpoint_interval=None, checkpoint_signal=None,
                 loglikelihood_function=None,
                 nprocesses=1, use_mpi=False):

        # create the betas if not provided
        if betas is None:
            betas = default_beta_ladder(len(model.variable_params),
                                        ntemps=ntemps)
        self.model = model
        # create a wrapper for calling the model
        model_call = _EpsieCallModel(model, loglikelihood_function)
        # Set up the pool
        if nprocesses > 1:
            # these are used to help parallelize over multiple cores / MPI
            models._global_instance = model_call
            model_call = models._call_global_model
        pool = choose_pool(mpi=use_mpi, processes=nprocesses)
        if pool is not None:
            pool.count = nprocesses
        # initialize the sampler
        self._sampler = ParallelTemperedSampler(
            model.sampling_params, model_call, nchains, betas=betas,
            swap_interval=swap_interval,
            proposals=proposals, default_proposal=default_proposal,
            default_proposal_args=default_proposal_args,
            seed=seed, pool=pool)
        # set other parameters
        self._nwalkers = nchains
        # infer the temperature count from the beta ladder so it is set even
        # when only betas were provided
        self._ntemps = len(betas)
        self._checkpoint_interval = checkpoint_interval
        self._checkpoint_signal = checkpoint_signal
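
One detail worth noting in the nprocesses > 1 branch above: the model wrapper is stored in a module-level global and only a small forwarding function is handed to the pool, so the heavy model object itself never has to be pickled for every task. Below is a generic, self-contained sketch of that pattern, independent of PyCBC's models module; the names HeavyModel, _global_instance, and _call_global_model here are illustrative, and the sketch assumes fork-based worker processes (Unix only).

import multiprocessing

# module-level slot for the heavy object; workers forked from the parent
# inherit this, so only the lightweight wrapper below ever gets pickled
_global_instance = None


def _call_global_model(x):
    """Picklable stand-in that forwards to the module-level instance."""
    return _global_instance(x)


class HeavyModel:
    """Hypothetical stand-in for a model that is expensive to pickle."""
    def __call__(self, x):
        return -0.5 * x * x


def main():
    global _global_instance
    _global_instance = HeavyModel()
    # the pool receives only _call_global_model, never HeavyModel itself;
    # 'fork' keeps the already-set global visible in the workers
    with multiprocessing.get_context('fork').Pool(2) as pool:
        print(pool.map(_call_global_model, [0.0, 1.0, 2.0]))


if __name__ == '__main__':
    main()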