def __init__(self, config, batch_size):
    """Build the combined auxiliary-data constraint terms for ``config``.

    Sets up the Gaussian and Poisson constraint combinations, a parameter
    viewer over the auxiliary data, and — when at least one constraint
    term exists — a tensor viewer ordering the constraint data indices.
    """
    self.config = config
    self.batch_size = batch_size

    self.constraints_gaussian = gaussian_constraint_combined(
        config, batch_size=self.batch_size
    )
    self.constraints_poisson = poisson_constraint_combined(
        config, batch_size=self.batch_size
    )

    # viewer over the (batched, or singleton-batch) parameter tensor,
    # restricted to parameters backed by auxiliary data
    self.viewer_aux = ParamViewer(
        (self.batch_size or 1, self.config.npars),
        self.config.par_map,
        self.config.auxdata_order,
    )

    assert self.constraints_gaussian.batch_size == self.batch_size
    assert self.constraints_poisson.batch_size == self.batch_size

    # collect the data-index tensors of whichever constraint terms exist
    data_indices = []
    if self.constraints_gaussian.has_pdf():
        data_indices.append(self.constraints_gaussian._normal_data)
    if self.constraints_poisson.has_pdf():
        data_indices.append(self.constraints_poisson._poisson_data)
    if self.has_pdf():
        self.constraints_tv = _TensorViewer(data_indices, self.batch_size)
def test_tensorviewer(backend):
    """Check _TensorViewer splitting by name and stitching back together."""
    tb, _ = backend
    tv = _TensorViewer(
        [
            tb.astensor([0, 4, 5]),
            tb.astensor([1, 2, 3]),
            tb.astensor([6]),
        ],
        names=['zzz', 'aaa', 'x'],
    )

    data = tb.astensor(tb.astensor(list(range(7))) * 10, dtype='int')

    # split by an explicit name selection and convert pieces to plain lists
    def split_lists(selection):
        return [tb.tolist(piece) for piece in tv.split(data, selection=selection)]

    assert split_lists(['aaa']) == [[10, 20, 30]]
    assert split_lists(['aaa', 'zzz']) == [[10, 20, 30], [0, 40, 50]]
    assert split_lists(['zzz', 'aaa']) == [[0, 40, 50], [10, 20, 30]]
    assert split_lists(['x', 'aaa']) == [[60], [10, 20, 30]]
    assert split_lists([]) == []

    # no selection: all pieces, in declaration order
    pieces = [tb.tolist(piece) for piece in tv.split(data)]
    assert pieces == [[0, 40, 50], [10, 20, 30], [60]]

    subviewer = _TensorViewer(
        [
            tb.astensor([0]),
            tb.astensor([1, 2, 3]),
        ],
        names=['x', 'aaa'],
    )
    restitched = subviewer.stitch(tv.split(data, ['x', 'aaa']))
    assert tb.tolist(restitched) == [60, 10, 20, 30]

    subviewer = _TensorViewer(
        [
            tb.astensor([0, 1, 2]),
            tb.astensor([3]),
        ],
        names=['aaa', 'x'],
    )
    restitched = subviewer.stitch(tv.split(data, ['aaa', 'x']))
    assert tb.tolist(restitched) == [10, 20, 30, 60]
def _final_objective(
    pars, data, fixed_values, fixed_idx, variable_idx, do_stitch, objective, pdf
):
    """Evaluate ``objective`` on the (optionally stitched) parameter tensor.

    When ``do_stitch`` is true, the constant ``fixed_values`` are re-inserted
    at ``fixed_idx`` around the free parameters before evaluation.
    """
    log.debug('jitting function')
    tensorlib, _ = get_backend()
    free_pars = tensorlib.astensor(pars)

    if not do_stitch:
        return objective(free_pars, data, pdf)[0]

    # rebuild the full parameter vector: fixed values at their original
    # indices, free parameters everywhere else
    viewer = _TensorViewer([fixed_idx, variable_idx])
    full_pars = viewer.stitch(
        [tensorlib.astensor(fixed_values, dtype='float'), free_pars]
    )
    return objective(full_pars, data, pdf)[0]
def test_stitch_pars(backend):
    """_make_stitch_pars with no viewer is the identity; with a viewer it
    re-inserts fixed values (or an override tensor) at the fixed indices."""
    tb, _ = backend

    passthrough = _make_stitch_pars()
    pars = ['a', 'b', 1.0, 2.0, object()]
    assert passthrough(pars) == pars

    fixed_idx = [0, 3, 4]
    variable_idx = [1, 2, 5]
    fixed_vals = [10, 40, 50]
    variable_vals = [20, 30, 60]
    viewer = _TensorViewer([fixed_idx, variable_idx])
    stitch_pars = _make_stitch_pars(viewer, fixed_vals)

    free_pars = tb.astensor(variable_vals)
    assert tb.tolist(stitch_pars(free_pars)) == [10, 20, 30, 40, 50, 60]
    # stitch_with overrides the fixed values with the supplied tensor
    assert tb.tolist(stitch_pars(free_pars, stitch_with=tb.zeros(3))) == [
        0,
        20,
        30,
        0,
        0,
        60,
    ]
def shim(
    objective,
    data,
    pdf,
    init_pars,
    par_bounds,
    fixed_vals=None,
    do_grad=False,
    do_stitch=False,
):
    """
    Prepare Minimization for Optimizer.

    Args:
        objective (:obj:`func`): objective function
        data (:obj:`list`): observed data
        pdf (~pyhf.pdf.Model): The statistical model adhering to the schema model.json
        init_pars (:obj:`list` of :obj:`float`): The starting values of the model parameters for minimization.
        par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
            are allowed to reach in the fit.
            The shape should be ``(n, 2)`` for ``n`` model parameters.
        fixed_vals (:obj:`list` of :obj:`list`/:obj:`tuple`): The pairs of index and constant value for a constant
            model parameter during minimization. Set to ``None`` to allow all parameters to float.

    .. note::

        ``minimizer_kwargs`` is a dictionary containing

        - ``func`` (:obj:`func`): backend-wrapped ``objective`` function (potentially with gradient)
        - ``x0`` (:obj:`list`): modified initializations for minimizer
        - ``do_grad`` (:obj:`bool`): whether or not gradient is used
        - ``bounds`` (:obj:`list`): modified bounds for minimizer
        - ``fixed_vals`` (:obj:`list`): modified fixed values for minimizer

    .. note::

        ``stitch_pars(pars, stitch_with=None)`` is a callable that will
        stitch the fixed parameters of the minimization back into the unfixed parameters.

    .. note::

        ``do_stitch`` will modify the ``init_pars``, ``par_bounds``, and ``fixed_vals`` by stripping away
        the entries associated with fixed parameters. The parameters can be stitched back in
        via ``stitch_pars``.

    Returns:
        minimizer_kwargs (:obj:`dict`): arguments to pass to a minimizer following the :func:`scipy.optimize.minimize` API (see notes)
        stitch_pars (:obj:`func`): callable that stitches fixed parameters into the unfixed parameters
    """
    tensorlib, _ = get_backend()

    fixed_vals = fixed_vals or []
    fixed_idx = [pair[0] for pair in fixed_vals]
    fixed_values = [pair[1] for pair in fixed_vals]
    variable_idx = [i for i in range(pdf.config.npars) if i not in fixed_idx]

    if do_stitch:
        # strip fixed parameters out of the initialization and bounds so the
        # minimizer only ever sees the floating parameters
        all_init = tensorlib.astensor(init_pars)
        variable_init = tensorlib.tolist(
            tensorlib.gather(all_init, tensorlib.astensor(variable_idx, dtype='int'))
        )
        variable_bounds = [par_bounds[i] for i in variable_idx]
        # stitched out the fixed values, so we don't pass any to the underlying minimizer
        minimizer_fixed_vals = []
        viewer = _TensorViewer([fixed_idx, variable_idx])
        # NB: this is a closure, tensorlib needs to be accessed at a different point in time
        stitch_pars = _make_stitch_pars(viewer, fixed_values)
    else:
        variable_init = init_pars
        variable_bounds = par_bounds
        minimizer_fixed_vals = fixed_vals
        stitch_pars = _make_stitch_pars()

    objective_and_grad = _get_tensor_shim()(
        objective,
        tensorlib.astensor(data),
        pdf,
        stitch_pars,
        do_grad=do_grad,
        jit_pieces={
            'fixed_idx': fixed_idx,
            'variable_idx': variable_idx,
            'fixed_values': fixed_values,
            'do_stitch': do_stitch,
        },
    )

    minimizer_kwargs = {
        'func': objective_and_grad,
        'x0': variable_init,
        'do_grad': do_grad,
        'bounds': variable_bounds,
        'fixed_vals': minimizer_fixed_vals,
    }

    return minimizer_kwargs, stitch_pars