def auto_assign_sampler(
    model: Model,
    sampler_type: Optional[str] = None,
):
    """Pick a step-method name for ``model`` (toy sampler assigner).

    Parameters
    ----------
    model : pymc4.Model
        Model to sample posterior for
    sampler_type : Optional[str]
        The step method type for the model

    Returns
    -------
    sampler_type : str
        Sampler type name
    """
    # An explicit user choice always wins over auto-assignment.
    if sampler_type:
        _log.info("Working with {} sampler".format(reg_samplers[sampler_type].__name__))
        return sampler_type

    # Inspect the model's free variables: any discrete one forces a
    # compound step, otherwise NUTS handles the fully-continuous model.
    _, _, discrete_names, _, _, _ = initialize_state(model)
    if discrete_names:
        _log.info("The model contains discrete distributions. \nCompound step is chosen.")
        return "compound"
    _log.info("Auto-assigning NUTS sampler")
    return "nuts"
def __init__(
    self,
    model: Model,
    **kwargs,
):
    """Validate ``model`` and distribute ``kwargs`` to the sampler parts.

    Parameters
    ----------
    model : pymc4.Model
        Model to sample posterior for
    **kwargs
        Forwarded to `_assign_arguments`, which splits them into kwargs
        for the kernel, the adaptation kernel, and the chain sampler.

    Raises
    ------
    TypeError
        If ``model`` is not a `pymc4.Model` instance.
    ValueError
        If this sampler is gradient-based (`self._grad` is True) but the
        model contains discrete distributions.
    """
    if not isinstance(model, Model):
        # Fixed: the original used a backslash line-continuation inside the
        # string literal, which embedded the source indentation into the
        # raised message.
        raise TypeError(
            "`sample` function only supports `pymc4.Model` objects, "
            "but you've passed `{}`".format(type(model))
        )
    _, _, disc_names, cont_names, _, _ = initialize_state(model)
    # If the sampler computes gradients during `one_step` and the model
    # contains discrete distributions, we fail early.
    if self._grad is True and disc_names:
        raise ValueError(
            "Discrete distributions can't be used with gradient-based sampler"
        )
    self.model = model
    self.stat_names: List[str] = []
    self.parent_inds: List[int] = []
    # assign arguments from **kwargs to distinct kwargs for
    # `kernel`, `adaptation_kernel`, `chain_sampler`
    self._assign_arguments(kwargs)
    # check arguments for correctness
    self._check_arguments()
    self._bound_kwargs()
def check_proposal_functions(
    model: Model,
    state: Optional[flow.SamplingState] = None,
    observed: Optional[dict] = None,
) -> bool:
    """
    Check for the non-default proposal generation functions

    Parameters
    ----------
    model : pymc4.Model
        Model to sample posterior for
    state : Optional[flow.SamplingState]
        Current state
    observed : Optional[Dict[str, Any]]
        Observed values (optional)

    Returns
    -------
    bool
        True if any free variable's distribution carries a callable
        `_default_new_state_part` proposal function, False otherwise.
    """
    (_, state, _, _, continuous_distrs, discrete_distrs) = initialize_state(
        model, observed=observed, state=state
    )
    # Iterate variable names directly; the original enumerated the values
    # list only to index a parallel keys list, leaving the value unused.
    for var_name in state.all_unobserved_values:
        untrs_var, _ = scope_remove_transformed_part_if_required(
            var_name, state.transformed_values
        )
        # get the distribution for the random variable name; a variable is
        # either continuous or discrete, so fall back to the discrete map
        # (KeyError there indicates an inconsistent state, as before)
        distr = continuous_distrs.get(untrs_var, None)
        if distr is None:
            distr = discrete_distrs[untrs_var]
        # early exit on the first custom proposal function
        if callable(distr._default_new_state_part):
            return True
    return False
def _assign_default_methods(
    self,
    *,
    sampler_methods: Optional[List] = None,
    state: Optional[flow.SamplingState] = None,
    observed: Optional[dict] = None,
):
    """Assign a transition kernel to every free variable of the model.

    Parameters
    ----------
    sampler_methods : Optional[List]
        User-provided per-variable sampler specifications; converted to a
        mapping of variable name -> (sampler, kwargs) by
        `CompoundStep._convert_sampler_methods`.
    state : Optional[flow.SamplingState]
        Current sampling state (optional)
    observed : Optional[dict]
        Observed values (optional)
    """
    converted_sampler_methods: List = CompoundStep._convert_sampler_methods(
        sampler_methods)
    (_, state, _, _, continuous_distrs, discrete_distrs) = initialize_state(self.model,
        observed=observed, state=state)
    init = state.all_unobserved_values
    init_state = list(init.values())
    init_keys = list(init.keys())
    # assignd samplers for free variables
    make_kernel_fn: list = []
    # user passed kwargs for each sampler in `make_kernel_fn`
    part_kernel_kwargs: list = []
    # keep the list for proposal func names
    func_names: list = []
    # NOTE(review): `state_part` is unused; only the index `i` is needed to
    # look up the variable name in `init_keys`.
    for i, state_part in enumerate(init_state):
        untrs_var, unscoped_tr_var = scope_remove_transformed_part_if_required(
            init_keys[i], state.transformed_values)
        # get the distribution for the random variable name
        distr = continuous_distrs.get(untrs_var, None)
        if distr is None:
            distr = discrete_distrs[untrs_var]
        # get custom `new_state_fn` for the distribution
        func = distr._default_new_state_part
        # simplest way of assigning sampling methods
        # if the sampler_methods was passed and if a var is provided
        # then the var will be assigned to the given sampler
        # but will also be checked if the sampler supports the distr
        # 1. If sampler is provided by the user, we create new sampler
        #    and add to `make_kernel_fn`
        # 2. If the distribution has `new_state_fn` then the new sampler
        #    should be create also. Because sampler is initialized with
        #    the `new_state_fn` argument.
        if unscoped_tr_var in converted_sampler_methods:
            sampler, kwargs = converted_sampler_methods[unscoped_tr_var]
            # check for the sampler able to sampler from the distribution:
            # a gradient-based sampler cannot handle a distribution without
            # gradient support
            if not distr._grad_support and sampler._grad:
                raise ValueError(
                    "The `{}` doesn't support gradient, please provide an appropriate sampler method"
                    .format(unscoped_tr_var))
            # add sampler to the dict
            make_kernel_fn.append(sampler)
            part_kernel_kwargs.append({})
            # update with user provided kwargs
            part_kernel_kwargs[-1].update(kwargs)
            # if proposal function is provided then replace
            func = part_kernel_kwargs[-1].get("new_state_fn", func)
            # add the default `new_state_fn` for the distr
            # `new_state_fn` is supported for only RandomWalkMetropolis transition
            # kernel.
            # NOTE(review): `partial(func)()` binds no arguments and calls
            # immediately, i.e. it is equivalent to `func()` — presumably
            # `func` is a factory returning the actual new_state_fn; confirm.
            if func and sampler._name == "rwm":
                part_kernel_kwargs[-1]["new_state_fn"] = partial(func)()
        elif callable(func):
            # If distribution has defined `new_state_fn` attribute then we need
            # to assign `RandomWalkMetropolis` transition kernel
            make_kernel_fn.append(RandomWalkM)
            part_kernel_kwargs.append({})
            part_kernel_kwargs[-1]["new_state_fn"] = partial(func)()
        else:
            # by default if user didn't not provide any sampler
            # we choose NUTS for the variable with gradient and
            # RWM for the variable without the gradient
            sampler = NUTS if distr._grad_support else RandomWalkM
            make_kernel_fn.append(sampler)
            part_kernel_kwargs.append({})
            # _log.info("Auto-assigning NUTS sampler...")
        # save proposal func names (one entry per free variable)
        func_names.append(func._name if func else "default")
    # `make_kernel_fn` contains (len(state)) sampler methods, this could lead
    # to more overhed when we are iterating at each call of `one_step` in the
    # compound step kernel. For that we need to merge some of the samplers.
    kernels, set_lengths = self._merge_samplers(make_kernel_fn, part_kernel_kwargs)
    # log variable sampler mapping
    CompoundStep._log_variables(init_keys, kernels, set_lengths, self.parent_inds, func_names)
    # save to use late for compound kernel init
    self.kernel_kwargs["compound_samplers"] = kernels
    self.kernel_kwargs["compound_set_lengths"] = set_lengths