def apply_function(self):
    for container in self.data:
        # Update uncertainty first, before the weights are changed.
        # This step is skipped in events mode.
        if self.error_method == "sumw2":
            # If computing uncertainties in events mode, warn that
            # hs error propagation will be skipped
            if self.data.data_specs == 'events':
                logging.trace(
                    'WARNING: running stage in events mode. Hypersurface '
                    'error propagation will be IGNORED.'
                )
            elif self.propagate_uncertainty:
                calc_uncertainty(
                    container["weights"].get(WHERE),
                    container["hs_scales_uncertainty"].get(WHERE),
                    container["errors"].get(WHERE),
                )
                container['errors'].mark_changed()
            else:
                vectorizer.imul(container["hs_scales"], out=container["errors"])
                container['errors'].mark_changed()

        # Update weights according to hypersurfaces
        propagate_hs_scales(
            container["weights"].get(WHERE),
            container["hs_scales"].get(WHERE),
            container["weights"].get(WHERE),
        )
        container['weights'].mark_changed()
def apply_function(self):
    for container in self.data:
        # Update uncertainty first, before the weights are changed
        if self.error_method == "sumw2":
            if self.propagate_uncertainty:
                calc_uncertainty(
                    container["weights"].get(WHERE),
                    container["hs_scales_uncertainty"].get(WHERE),
                    container["errors"].get(WHERE),
                )
            else:
                vectorizer.imul(container["hs_scales"], out=container["errors"])
            container['errors'].mark_changed()

        # Update weights according to hypersurfaces
        vectorizer.imul(container["hs_scales"], out=container["weights"])
        container['weights'].mark_changed()

        # Correct negative event counts that can be introduced by
        # hypersurfaces (due to intercept)
        weights = container["weights"].get('host')
        neg_mask = weights < 0.
        if neg_mask.sum() > 0:
            weights[neg_mask] = 0.
            np.copyto(src=weights, dst=container["weights"].get('host'))
            container["weights"].mark_changed()
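# --- Illustrative sketch (not part of the stage above) ----------------------
# A minimal, self-contained NumPy example of the negative-weight clipping step
# performed above: a hypersurface with a negative intercept can push some
# scaled event weights below zero, which would be unphysical, so those entries
# are clamped to zero. All names and values below are hypothetical and chosen
# only for illustration.
import numpy as np

scaled_weights = np.array([1.5, -0.2, 0.7, -0.01, 2.3])

neg_mask = scaled_weights < 0.
if neg_mask.sum() > 0:
    scaled_weights[neg_mask] = 0.

# Equivalently: scaled_weights = np.clip(scaled_weights, a_min=0., a_max=None)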
def apply_function(self):
    for container in self.data:
        vectorizer.imul(vals=container['survival_prob'], out=container['weights'])
def apply(self):
    # DO NOT USE THIS STAGE AS YOUR TEMPLATE IF YOU ARE NEW TO PISA!
    # --------------------------------------------------------------
    #
    # We are overwriting the `apply` method rather than the `apply_function`
    # method because we are manipulating the data binning in a delicate way
    # that doesn't work with automatic rebinning.
    self.data.data_specs = self.input_specs

    if self.scale_errors:
        for container in self.data:
            vectorizer.pow(
                vals=container["errors"],
                pwr=2,
                out=container["variances"],
            )

    input_binvols = SmartArray(
        self.input_specs.weighted_bin_volumes(attach_units=False).ravel()
    )
    output_binvols = SmartArray(
        self.output_specs.weighted_bin_volumes(attach_units=False).ravel()
    )

    for container in self.data:
        self.data.data_specs = self.input_specs
        # we want these to be SmartArrays, so no `.get(WHERE)`
        weights_flat_hist = container["weights"]
        if self.scale_errors:
            vars_flat_hist = container["variances"]
        self.data.data_specs = self.output_specs

        if self.rs_mode == ResampleMode.UP:
            # The `unroll_binning` function returns the midpoints of the bins
            # in the dimension `name`.
            fine_gridpoints = [
                SmartArray(container.unroll_binning(name, self.output_specs))
                for name in self.output_specs.names
            ]
            # We look up at which bin index of the input binning the midpoints
            # of the output binning can be found, and assign to each the
            # content of the bin of that index.
            container["weights_resampled"] = translation.lookup(
                fine_gridpoints,
                weights_flat_hist,
                self.input_specs,
            )
            if self.scale_errors:
                container["vars_resampled"] = translation.lookup(
                    fine_gridpoints,
                    vars_flat_hist,
                    self.input_specs,
                )
            # These are the volumes of the bins we sample *from*
            origin_binvols = translation.lookup(
                fine_gridpoints,
                input_binvols,
                self.input_specs,
            )
            # Finally, we scale the weights and variances by the ratio of the
            # bin volumes in place:
            vectorizer.imul(output_binvols, container["weights_resampled"])
            vectorizer.itruediv(origin_binvols, container["weights_resampled"])
            if self.scale_errors:
                vectorizer.imul(output_binvols, container["vars_resampled"])
                vectorizer.itruediv(origin_binvols, container["vars_resampled"])
        elif self.rs_mode == ResampleMode.DOWN:
            # not yet implemented
            pass

        if self.scale_errors:
            vectorizer.sqrt(
                vals=container["vars_resampled"],
                out=container["errors_resampled"],
            )
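# --- Illustrative sketch (not part of the stage above) ----------------------
# A minimal, self-contained NumPy example of the upsampling logic used in
# ResampleMode.UP: the fine (output) bin midpoints are looked up in the coarse
# (input) histogram, and the looked-up content is scaled by the ratio of
# output to origin bin volumes so that the total histogram content is
# preserved. This is a 1D sketch with hypothetical numbers, not PISA code.
import numpy as np

coarse_edges = np.array([0., 2., 4.])            # 2 coarse bins
coarse_hist = np.array([8., 4.])                 # weights per coarse bin
fine_edges = np.linspace(0., 4., 9)              # 8 fine bins
fine_mids = 0.5 * (fine_edges[:-1] + fine_edges[1:])

# index of the coarse bin containing each fine-bin midpoint (the "lookup")
idx = np.digitize(fine_mids, coarse_edges) - 1

coarse_widths = np.diff(coarse_edges)            # origin bin volumes
fine_widths = np.diff(fine_edges)                # output bin volumes

# lookup, then multiply by the output volume and divide by the origin volume
fine_hist = coarse_hist[idx] * fine_widths / coarse_widths[idx]

# the volume-ratio scaling conserves the total histogram content
assert np.isclose(fine_hist.sum(), coarse_hist.sum())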
def apply_function(self): for container in self.data: vectorizer.imul(vals=container["fold_weight"], out=container["weights"])
def apply_function(self): for container in self.data: vectorizer.imul(vals=container["adhoc_scale_factors"], out=container["weights"])
def apply_function(self):
    for container in self.data:
        if container.name in ["nutau_cc", "nutaubar_cc"]:
            vectorizer.imul(container["nutau_xsec_scale"], container["weights"])