def __init__(self, input=None, db='ram', name='MCMC', calc_deviance=True, **kwds):
    """Initialize an MCMC instance.

    :Parameters:
      - input : module, list, tuple, dictionary, set, object or nothing.
          Model definition, in terms of Stochastics, Deterministics, Potentials
          and Containers. If nothing, all nodes are collected from the base
          namespace.
      - db : string
          The name of the database backend that will store the values
          of the stochastics and deterministics sampled during the MCMC loop.
      - calc_deviance : boolean
          Whether deviance should be computed during sampling. Default True,
          which preserves the previously hard-coded behavior.
      - **kwds :
          Keywords arguments to be passed to the database instantiation method.
    """
    Sampler.__init__(self, input, db, name, calc_deviance=calc_deviance, **kwds)

    # One (initially empty) list of step methods per stochastic variable.
    self.step_method_dict = {}
    for s in self.stochastics:
        self.step_method_dict[s] = []

    # Attribute names included when the sampler state is saved/restored.
    self._state = ['status', '_current_iter', '_iter', '_tune_interval', '_burn', '_thin']
def sample(self, iter, burn=0, thin=1, tune_interval=1000, save_interval=None, verbose=0):
    """
    sample(iter, burn, thin, tune_interval, save_interval, verbose)

    Initialize traces, run sampling loop, clean up afterward. Calls _loop.

    :Parameters:
      - iter : int
          Total number of iterations to do.
      - burn : int
          Variables will not be tallied until this many iterations are
          complete, default 0.
      - thin : int
          Variables will be tallied at intervals of this many iterations,
          default 1.
      - tune_interval : int
          Step methods will be tuned at intervals of this many iterations,
          default 1000.
      - save_interval : int or None
          If given, the model state will be saved at intervals of this many
          iterations.
      - verbose : int
          Level of output verbosity.
    """
    self.assign_step_methods()

    if burn >= iter:
        # Fixed: `raise ValueError, msg` is Python-2-only syntax; the
        # call form below is valid in both Python 2 and 3.
        raise ValueError('Burn interval must be smaller than specified number of iterations.')
    self._iter = int(iter)
    self._burn = int(burn)
    self._thin = int(thin)
    self._tune_interval = int(tune_interval)
    self._save_interval = save_interval

    # Number of samples actually tallied after burn-in and thinning.
    length = int(np.ceil((1.0 * iter - burn) / thin))
    self.max_trace_length = length

    # Flags for tuning
    self._tuning = True
    self._tuned_count = 0

    Sampler.sample(self, iter, length, verbose)
def sample(
    self,
    iter,
    burn=0,
    thin=1,
    tune_interval=1000,
    tune_throughout=True,
    save_interval=None,
    verbose=0,
    progress_bar=True,
):
    """
    sample(iter, burn, thin, tune_interval, tune_throughout, save_interval, verbose)

    Initialize traces, run sampling loop, clean up afterward. Calls _loop.

    :Parameters:
      - iter : int
          Total number of iterations to do
      - burn : int
          Variables will not be tallied until this many iterations are complete, default 0
      - thin : int
          Variables will be tallied at intervals of this many iterations, default 1
      - tune_interval : int
          Step methods will be tuned at intervals of this many iterations, default 1000
      - tune_throughout : boolean
          If true, tuning will continue after the burnin period (True); otherwise tuning
          will halt at the end of the burnin period.
      - save_interval : int or None
          If given, the model state will be saved at intervals of this many iterations
      - verbose : boolean
      - progress_bar : boolean
          If true (and verbose is off), display a text progress bar during sampling.
    """
    self.assign_step_methods(verbose=verbose)

    if burn >= iter:
        # Fixed: `raise ValueError, msg` is Python-2-only syntax; the
        # call form below is valid in both Python 2 and 3.
        raise ValueError("Burn interval must be smaller than specified number of iterations.")
    self._iter = int(iter)
    self._burn = int(burn)
    self._thin = int(thin)
    self._tune_interval = int(tune_interval)
    self._tune_throughout = tune_throughout
    self._save_interval = save_interval

    # Number of samples tallied after burn-in/thinning; at least 1 so the
    # trace is never zero-length.
    length = max(int(np.floor((1.0 * iter - burn) / thin)), 1)
    self.max_trace_length = length

    # Flags for tuning
    self._tuning = True
    self._tuned_count = 0

    # Progress bar (only when verbose output is off).
    self.pbar = None
    if not verbose and progress_bar:
        widgets = ["Sampling: ", Percentage(), " ", Bar(marker="0", left="[", right="]"), " ", Iterations()]
        self.pbar = ProgressBar(widgets=widgets, maxval=self._iter)

    # Run sampler
    Sampler.sample(self, iter, length, verbose)
def __init__(self, input=None, db='ram', eps=.001, diff_order=5, **kwds):
    """Initialize a normal-approximation sampler.

    :Parameters:
      - input : module, list, tuple, dictionary, set, object or nothing.
          Model definition.
      - db : string
          The name of the database backend for storing sampled values.
      - eps : float
          Step size used for numerical differentiation, default .001.
      - diff_order : int
          Order of the numerical-difference approximation, default 5.
      - **kwds :
          Keyword arguments passed to the database instantiation method.
    """
    if not scipy_imported:
        # Fixed: `raise ImportError, msg` is Python-2-only syntax; the
        # call form below is valid in both Python 2 and 3.
        raise ImportError('Scipy must be installed to use NormApprox and MAP.')

    MAP.__init__(self, input, eps, diff_order)

    # reinit_model=False: the model was already set up by MAP.__init__.
    Sampler.__init__(self, input, db, reinit_model=False, **kwds)

    self.C = NormApproxC(self)
def sample(self, iter, burn=0, thin=1, tune_interval=1000, tune_throughout=True, save_interval=None, verbose=0):
    """
    sample(iter, burn, thin, tune_interval, tune_throughout, save_interval, verbose)

    Initialize traces, run sampling loop, clean up afterward. Calls _loop.

    :Parameters:
      - iter : int
          Total number of iterations to do
      - burn : int
          Variables will not be tallied until this many iterations are complete, default 0
      - thin : int
          Variables will be tallied at intervals of this many iterations, default 1
      - tune_interval : int
          Step methods will be tuned at intervals of this many iterations, default 1000
      - tune_throughout : boolean
          If true, tuning will continue after the burnin period (True); otherwise tuning
          will halt at the end of the burnin period.
      - save_interval : int or None
          If given, the model state will be saved at intervals of this many iterations
      - verbose : boolean
    """
    self.assign_step_methods()

    if burn >= iter:
        # Fixed: `raise ValueError, msg` is Python-2-only syntax; the
        # call form below is valid in both Python 2 and 3.
        raise ValueError('Burn interval must be smaller than specified number of iterations.')
    self._iter = int(iter)
    self._burn = int(burn)
    self._thin = int(thin)
    self._tune_interval = int(tune_interval)
    self._tune_throughout = tune_throughout
    self._save_interval = save_interval

    # Number of samples tallied after burn-in and thinning.
    length = int(np.ceil((1.0 * iter - burn) / thin))
    self.max_trace_length = length

    # Flags for tuning
    self._tuning = True
    self._tuned_count = 0

    Sampler.sample(self, iter, length, verbose)
def sample(self, iter, burn=0, thin=1, tune_interval=1000, tune_throughout=True, save_interval=None, verbose=0):
    """
    sample(iter, burn, thin, tune_interval, tune_throughout, save_interval, verbose)

    Initialize traces, run sampling loop, clean up afterward. Calls _loop.

    :Parameters:
      - iter : int
          Total number of iterations to do
      - burn : int
          Variables will not be tallied until this many iterations are complete, default 0
      - thin : int
          Variables will be tallied at intervals of this many iterations, default 1
      - tune_interval : int
          Step methods will be tuned at intervals of this many iterations, default 1000
      - tune_throughout : boolean
          If true, tuning will continue after the burnin period (True); otherwise tuning
          will halt at the end of the burnin period.
      - save_interval : int or None
          If given, the model state will be saved at intervals of this many iterations
      - verbose : boolean
    """
    self.assign_step_methods()

    if burn >= iter:
        # Fixed: `raise ValueError, msg` is Python-2-only syntax; the
        # call form below is valid in both Python 2 and 3.
        raise ValueError('Burn interval must be smaller than specified number of iterations.')
    self._iter = int(iter)
    self._burn = int(burn)
    self._thin = int(thin)
    self._tune_interval = int(tune_interval)
    self._tune_throughout = tune_throughout
    self._save_interval = save_interval

    # Number of samples tallied after burn-in and thinning.
    length = int(np.ceil((1.0 * iter - burn) / thin))
    self.max_trace_length = length

    # Flags for tuning
    self._tuning = True
    self._tuned_count = 0

    Sampler.sample(self, iter, length, verbose)
def __init__(self, input=None, db="ram", name="MCMC", calc_deviance=True, **kwds):
    """Initialize an MCMC instance.

    :Parameters:
      - input : module, list, tuple, dictionary, set, object or nothing.
          Model definition, in terms of Stochastics, Deterministics, Potentials
          and Containers. If nothing, all nodes are collected from the base
          namespace.
      - db : string
          The name of the database backend that will store the values
          of the stochastics and deterministics sampled during the MCMC loop.
      - calc_deviance : boolean
          Whether deviance should be computed during sampling.
      - **kwds :
          Keywords arguments to be passed to the database instantiation method.
    """
    Sampler.__init__(self, input, db, name, calc_deviance=calc_deviance, **kwds)

    # Step methods have not been assigned to the stochastics yet.
    self._sm_assigned = False

    # One (initially empty) list of step methods per stochastic variable.
    self.step_method_dict = dict((stoch, []) for stoch in self.stochastics)

    # Attribute names included when the sampler state is saved/restored.
    self._state = ["status", "_current_iter", "_iter", "_tune_interval", "_burn", "_thin"]
def get_state(self):
    """Return the current state of the sampler and its step methods,
    allowing sampling to be restarted at a later time."""
    # Collect the (deduplicated) set of all step methods in use.
    methods = set()
    for stoch in self.stochastics:
        methods.update(self.step_method_dict[stoch])
    self.step_methods = methods

    state = Sampler.get_state(self)

    # Record each step method's state, keyed by its unique id.
    state["step_methods"] = dict(
        (sm._id, sm.current_state().copy()) for sm in methods
    )

    return state
def get_state(self):
    """Return the sampler's and step methods' current state so that
    sampling can be resumed later."""
    # Gather every step method attached to any stochastic variable.
    self.step_methods = set()
    for s in self.stochastics:
        for sm in self.step_method_dict[s]:
            self.step_methods.add(sm)

    state = Sampler.get_state(self)
    state['step_methods'] = {}

    # The state of each StepMethod, keyed by its unique id.
    for sm in self.step_methods:
        state['step_methods'][sm._id] = sm.current_state().copy()

    return state
def sample(self, iter, burn=0, thin=1, tune_interval=1000, tune_throughout=True, save_interval=None, verbose=0, progress_bar=True):
    """
    sample(iter, burn, thin, tune_interval, tune_throughout, save_interval, verbose)

    Initialize traces, run sampling loop, clean up afterward. Calls _loop.

    :Parameters:
      - iter : int
          Total number of iterations to do
      - burn : int
          Variables will not be tallied until this many iterations are complete, default 0
      - thin : int
          Variables will be tallied at intervals of this many iterations, default 1
      - tune_interval : int
          Step methods will be tuned at intervals of this many iterations, default 1000
      - tune_throughout : boolean
          If true, tuning will continue after the burnin period (True); otherwise tuning
          will halt at the end of the burnin period.
      - save_interval : int or None
          If given, the model state will be saved at intervals of this many iterations
      - verbose : boolean
      - progress_bar : boolean
          If true (and verbose is off), display a text progress bar during sampling.
    """
    self.assign_step_methods(verbose=verbose)

    if burn >= iter:
        # Fixed: `raise ValueError, msg` is Python-2-only syntax; the
        # call form below is valid in both Python 2 and 3.
        raise ValueError('Burn interval must be smaller than specified number of iterations.')
    self._iter = int(iter)
    self._burn = int(burn)
    self._thin = int(thin)
    self._tune_interval = int(tune_interval)
    self._tune_throughout = tune_throughout
    self._save_interval = save_interval

    # Number of samples tallied after burn-in/thinning; at least 1 so the
    # trace is never zero-length.
    length = max(int(np.floor((1.0 * iter - burn) / thin)), 1)
    self.max_trace_length = length

    # Flags for tuning
    self._tuning = True
    self._tuned_count = 0

    # Progress bar (only when verbose output is off).
    self.pbar = None
    if not verbose and progress_bar:
        widgets = [
            'Sampling: ', Percentage(), ' ', Bar(marker='0', left='[', right=']'), ' ', Iterations()
        ]
        self.pbar = ProgressBar(widgets=widgets, maxval=self._iter)

    # Run sampler
    Sampler.sample(self, iter, length, verbose)