def _obstable_tree(self):
    if not self._obstable_tree_cache:
        llh = self.likelihood
        info = copy(llh.obstable_sm)
        for flh_name, flh in llh.fast_likelihoods.items():
            # loop over fast likelihoods: they only have a single "measurement"
            m = flh.pseudo_measurement
            ml = flh.full_measurement_likelihood
            pred = ml.get_predictions_par(self.par_dict_np, self.w)
            for i, obs in enumerate(flh.observables):
                info[obs]['theory'] = pred[obs]
                ll_central = info[obs]['ll_central']
                ll_sm = info[obs]['ll_sm']
                ll = m.get_logprobability_single(obs, pred[obs])
                # DeltaChi2 is -2*DeltaLogLikelihood
                info[obs]['pull exp.'] = pull(-2 * (ll - ll_central), dof=1)
                s = -1 if ll > ll_sm else 1
                info[obs]['pull SM'] = s * pull(-2 * (ll - ll_sm), dof=1)
        for lh_name, lh in llh.likelihoods.items():
            # loop over "normal" likelihoods
            ml = lh.measurement_likelihood
            pred = ml.get_predictions_par(self.par_dict_np, self.w)
            for i, obs in enumerate(lh.observables):
                info[obs]['theory'] = pred[obs]
                ll_central = info[obs]['ll_central']
                ll_sm = info[obs]['ll_sm']
                p_comb = info[obs]['exp. PDF']
                ll = p_comb.logpdf([pred[obs]])
                info[obs]['pull exp.'] = pull(-2 * (ll - ll_central), dof=1)
                s = -1 if ll > ll_sm else 1
                info[obs]['pull SM'] = s * pull(-2 * (ll - ll_sm), dof=1)
        self._obstable_tree_cache = info
    return self._obstable_tree_cache
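# The method above (and the variant below) converts a Delta chi^2 into a
# significance via pull(delta_chi2, dof). The helper itself lives in the
# surrounding module (in flavio it is provided by flavio.statistics.functions);
# the following is only a sketch of that conversion, assuming the usual
# two-sided Gaussian convention.
from scipy import stats


def pull_sketch(delta_chi2, dof):
    """Illustrative only: map a Delta chi^2 with `dof` degrees of freedom
    to a pull in units of sigma."""
    p = stats.chi2.cdf(delta_chi2, dof)   # probability content of the Delta chi^2
    return stats.norm.ppf(0.5 + 0.5 * p)  # two-sided Gaussian significance


# Example: pull_sketch(4, dof=1) == 2.0, i.e. a Delta chi^2 of 4 with one
# degree of freedom corresponds to a 2 sigma pull.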
def _obstable_tree(self):
    if not self._obstable_tree_cache:
        info = tree()  # nested dict
        pull_dof = 1
        llh = self.likelihood
        for flh_name, flh in llh.fast_likelihoods.items():
            # loop over fast likelihoods: they only have a single "measurement"
            m = flh.pseudo_measurement
            ml = flh.full_measurement_likelihood
            pred = ml.get_predictions_par(llh.par_dict, self.w)
            sm_cov = flh.sm_covariance.get(force=False)
            _, exp_cov = flh.exp_covariance.get(force=False)
            inspire_dict = self._get_inspire_dict(flh.observables, ml)
            for i, obs in enumerate(flh.observables):
                info[obs]['lh_name'] = flh_name
                info[obs]['name'] = obs if isinstance(obs, str) else obs[0]
                info[obs]['theory'] = pred[obs]
                info[obs]['th. unc.'] = np.sqrt(sm_cov[i, i])
                info[obs]['experiment'] = m.get_central(obs)
                info[obs]['exp. unc.'] = np.sqrt(exp_cov[i, i])
                info[obs]['exp. PDF'] = NormalDistribution(
                    m.get_central(obs), np.sqrt(exp_cov[i, i]))
                info[obs]['inspire'] = sorted(set(inspire_dict[obs]))
                ll_central = m.get_logprobability_single(
                    obs, m.get_central(obs))
                ll = m.get_logprobability_single(obs, pred[obs])
                # DeltaChi2 is -2*DeltaLogLikelihood
                info[obs]['pull'] = pull(-2 * (ll - ll_central), dof=pull_dof)
        for lh_name, lh in llh.likelihoods.items():
            # loop over "normal" likelihoods
            ml = lh.measurement_likelihood
            pred = ml.get_predictions_par(llh.par_dict, self.w)
            inspire_dict = self._get_inspire_dict(lh.observables, ml)
            for i, obs in enumerate(lh.observables):
                obs_dict = flavio.Observable.argument_format(obs, 'dict')
                obs_name = obs_dict.pop('name')
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    p_comb = flavio.combine_measurements(
                        obs_name,
                        include_measurements=ml.get_measurements,
                        **obs_dict)
                info[obs]['experiment'] = p_comb.central_value
                info[obs]['exp. unc.'] = max(p_comb.error_left,
                                             p_comb.error_right)
                info[obs]['exp. PDF'] = p_comb
                info[obs]['inspire'] = sorted(set(inspire_dict[obs]))
                info[obs]['theory'] = pred[obs]
                info[obs]['th. unc.'] = 0
                info[obs]['lh_name'] = lh_name
                info[obs]['name'] = obs if isinstance(obs, str) else obs[0]
                ll = p_comb.logpdf([pred[obs]]) - p_comb.logpdf(
                    [p_comb.central_value])
                info[obs]['pull'] = pull(-2 * ll, dof=pull_dof)
        self._obstable_tree_cache = info
    return self._obstable_tree_cache
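# Hypothetical helper, not part of the original class: the nested dict returned
# by _obstable_tree above can be ranked by the 'pull' entries it fills in,
# e.g. to list the observables in strongest tension.
def largest_pulls(obstable, n=10):
    """Return the n entries of an _obstable_tree dict with the largest pull,
    as (name, theory, experiment, pull) tuples."""
    ranked = sorted(obstable.values(), key=lambda e: e['pull'], reverse=True)
    return [(e['name'], e['theory'], e['experiment'], e['pull'])
            for e in ranked[:n]]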
def predictions(filename):
    """Append best-fit points, pulls and NP predictions for each Wilson
    coefficient scenario in `wcs` to the text file `filename`."""
    # `chi2`, `wcs`, `nobs`, `pull` and `sqrt` are defined at module level in
    # the surrounding script: chi2(wc) returns the global chi-square, wcs is a
    # list of Wilson-coefficient wrappers w(cr, ci), and nobs is the number of
    # observables entering the fit.
    f = open(filename, 'at', buffering=1)
    chiSM = chi2()  # SM chi-square (no Wilson coefficient argument)
    obscalc = [('<Rmue>(B+->Kll)', 1.0, 6.0),
               ('<Rmue>(B0->K*ll)', 0.045, 1.1),
               ('<Rmue>(B0->K*ll)', 1.1, 6.0)]
    for w in wcs:
        f.write(w.__name__ + '\n=================\n')
        # minimize the chi-square in the real and imaginary part of the
        # Wilson coefficient (iminuit 1.x keyword interface)
        chi = lambda cr, ci: chi2(w(cr, ci))
        m = Minuit(chi, cr=0, ci=0, error_cr=0.01, error_ci=0.01,
                   errordef=1, print_level=0)
        m.migrad()
        f.write('\tBest fit: ' + str(m.values[0]) + ' + '
                + str(m.values[1]) + 'i\n')
        chibf = m.fval
        f.write('\tPull (sqrt): ' + str(sqrt(chiSM - chibf)) + '\n')
        f.write('\tPull (sigma): ' + str(pull(chiSM - chibf, 2))
                + r' \sigma' + '\n')
        f.write('\tChi2/dof: ' + str(chibf / (nobs - 2)) + '\n')
        # m.minos()
        xr_centr = m.values[0]
        xi_centr = m.values[1]
        wcObj = w(xr_centr, xi_centr)
        # contour in the (cr, ci) plane; index [2] holds the contour points
        cont = m.mncontour('cr', 'ci', numpoints=40)[2]
        for o in range(0, len(obscalc)):
            # central NP prediction and its variation along the contour
            obs_centr = flavio.np_prediction(obscalc[o][0], wcObj,
                                             *obscalc[o][1:])
            obs_max = obs_min = obs_centr
            for i in range(0, len(cont)):
                wcObj = w(*cont[i])
                obs_max = max(obs_max,
                              flavio.np_prediction(obscalc[o][0], wcObj,
                                                   *obscalc[o][1:]))
                obs_min = min(obs_min,
                              flavio.np_prediction(obscalc[o][0], wcObj,
                                                   *obscalc[o][1:]))
            f.write('\t' + str(obscalc[o]) + ': ' + str(obs_centr) + ' + '
                    + str(obs_max - obs_centr) + ' - '
                    + str(obs_centr - obs_min) + '\n')
        f.write('\n\n')
    f.close()
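# predictions() relies on the module-level list `wcs` of Wilson-coefficient
# wrappers w(cr, ci). As an illustration only, one such entry could look like
# the sketch below; the coefficient name 'C9_bsmumu' and the 4.8 GeV scale are
# assumptions for this example, not taken from the original script.
import flavio


def C9(cr, ci):
    """Hypothetical wcs entry: new-physics contribution cr + 1j*ci to C9_bsmumu."""
    wc = flavio.WilsonCoefficients()
    wc.set_initial({'C9_bsmumu': cr + 1j * ci}, scale=4.8)
    return wc


# predictions() would then evaluate e.g.
# flavio.np_prediction('<Rmue>(B+->Kll)', C9(-1, 0), 1.0, 6.0)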