# Example #1
def load_parameters(file_res, file_cov, process, constraints):
    """Load SSE form factor central values and covariance from CSV files
    and add them to `constraints` as a multivariate Gaussian.

    Parameters:
    - file_res: CSV file with the coefficient central values
    - file_cov: CSV file with the covariance entries (upper triangle only)
    - process: process name, used as prefix of the parameter names
    - constraints: ParameterConstraints instance to attach the constraint to
    """
    implementation_name = process + ' SSE'
    res_dict = csv_to_dict(file_res)
    cov_dict = csv_to_dict(file_cov)
    keys_sorted = sorted(res_dict.keys())
    res = [res_dict[k] for k in keys_sorted]
    # M -> M + M^T - diag(M) since the dictionary contains only the entries above the diagonal
    cov = (np.array([[cov_dict.get((k, m), 0) for m in keys_sorted]
                     for k in keys_sorted]) +
           np.array([[cov_dict.get((m, k), 0) for m in keys_sorted]
                     for k in keys_sorted]) -
           np.diag([cov_dict[(k, k)] for k in keys_sorted]))
    parameter_names = [
        implementation_name + ' ' + coeff_name for coeff_name in keys_sorted
    ]
    for parameter_name in parameter_names:
        try:  # check if parameter object already exists
            p = Parameter.get_instance(parameter_name)
        # fix: narrow the bare `except:` to KeyError (a dict-style instance
        # lookup miss) so real errors are not silently swallowed
        except KeyError:  # otherwise, create a new one
            p = Parameter(parameter_name)
        else:  # if parameter exists, remove existing constraints
            # NOTE(review): other variants in this file call
            # remove_constraint (singular) — confirm which API is correct
            constraints.remove_constraints(parameter_name)
    constraints.add_constraint(
        parameter_names,
        MultivariateNormalDistribution(central_value=res, covariance=cov))
# Example #2
def load_parameters(filename, process, constraints):
    """Load BSZ form factor parameters from `filename` and add them to
    `constraints` as a multivariate Gaussian.

    Creates Parameter instances (with TeX labels) if they do not exist yet;
    otherwise removes their pre-existing constraints first.
    """
    implementation_name = process + ' BSZ'
    parameter_names = [
        implementation_name + ' ' + coeff_name for coeff_name in a_ff_string
    ]
    # a0_A0 and a0_T2 are not treated as independent parameters!
    parameter_names.remove(implementation_name + ' a0_A0')
    parameter_names.remove(implementation_name + ' a0_T2')
    for parameter_name in parameter_names:
        try:  # check if parameter object already exists
            p = Parameter[parameter_name]
        # fix: replace bare `except:` with KeyError — the only exception a
        # missing item lookup raises; a bare except would also hide bugs
        except KeyError:  # otherwise, create a new one
            p = Parameter(parameter_name)
            # get LaTeX representation of coefficient and form factor names
            _tex_a = tex_a[parameter_name.split(' ')[-1].split('_')[0]]
            _tex_ff = tex_ff[parameter_name.split(' ')[-1].split('_')[-1]]
            p.tex = r'$' + _tex_a + r'^{' + _tex_ff + r'}$'
            p.description = r'BSZ form factor parametrization coefficient $' + _tex_a + r'$ of $' + _tex_ff + r'$'
        else:  # if parameter exists, remove existing constraints
            constraints.remove_constraint(parameter_name)
    [central, unc, corr] = get_ffpar(filename)
    constraints.add_constraint(
        parameter_names,
        MultivariateNormalDistribution(central_value=central,
                                       covariance=np.outer(unc, unc) * corr))
# Example #3
def load_parameters(filename, process, constraints):
    """Read BSZ form factor parameters from `filename` and constrain them.

    Missing Parameter instances are created with TeX labels and a
    description; existing ones have their old constraints dropped. Finally
    a multivariate Gaussian built from the central values, uncertainties
    and correlation matrix is attached to `constraints`.
    """
    implementation_name = process + ' BSZ'
    parameter_names = []
    for coeff_name in a_ff_string:
        parameter_names.append(implementation_name + ' ' + coeff_name)
    # a0_f0 is not treated as independent parameter!
    parameter_names.remove(implementation_name + ' a0_f0')
    for name in parameter_names:
        try:
            # check whether a Parameter object with this name already exists
            existing = Parameter[name]
        except KeyError:
            # it does not: create it and attach LaTeX metadata
            new_par = Parameter(name)
            coeff = name.split(' ')[-1]
            _tex_a = tex_a[coeff.split('_')[0]]
            _tex_ff = tex_ff[coeff.split('_')[-1]]
            new_par.tex = r'$' + _tex_a + r'^{' + _tex_ff + r'}$'
            new_par.description = r'BSZ form factor parametrization coefficient $' + _tex_a + r'$ of $' + _tex_ff + r'$'
        else:
            # it does: discard any constraints already attached to it
            constraints.remove_constraint(name)
    central, unc, corr = get_ffpar(filename)
    constraints.add_constraint(
        parameter_names,
        MultivariateNormalDistribution(central_value=central,
                                       covariance=np.outer(unc, unc) * corr))
# Example #4
def load_parameters(filename, constraints):
    """Load form factor parameters from a YAML data file shipped inside
    the flavio.physics package and add them to `constraints` as a
    multivariate Gaussian.
    """
    f = pkgutil.get_data('flavio.physics', filename)
    # fix: yaml.load without an explicit Loader is deprecated and can
    # execute arbitrary code; safe_load suffices for plain data files
    ff_dict = yaml.safe_load(f)
    for parameter_name in ff_dict['parameters']:
        try:  # check if parameter object already exists
            p = Parameter.get_instance(parameter_name)
        # fix: narrow the bare `except:` to KeyError so genuine errors
        # are not silently swallowed
        except KeyError:  # otherwise, create a new one
            p = Parameter(parameter_name)
        else:  # if parameter exists, remove existing constraints
            # NOTE(review): other variants in this file call
            # remove_constraint (singular) — confirm which API is correct
            constraints.remove_constraints(parameter_name)
    covariance = np.outer(ff_dict['uncertainties'],
                          ff_dict['uncertainties']) * ff_dict['correlation']
    if not np.allclose(covariance, covariance.T):
        # if the covariance is not symmetric, it is assumed that only the values above the diagonal are present.
        # then: M -> M + M^T - diag(M)
        covariance = covariance + covariance.T - np.diag(np.diag(covariance))
    constraints.add_constraint(
        ff_dict['parameters'],
        MultivariateNormalDistribution(central_value=ff_dict['central_values'],
                                       covariance=covariance))
def load_parameters(file_res, file_cov, process, constraints):
    """Load SSE form factor central values and covariance from CSV files,
    create/refresh the corresponding Parameter objects (with TeX labels),
    and add a multivariate Gaussian constraint to `constraints`.
    """
    implementation_name = process + ' SSE'
    res_dict = csv_to_dict(file_res)
    cov_dict = csv_to_dict(file_cov)
    keys_sorted = sorted(res_dict.keys())
    res = [res_dict[k] for k in keys_sorted]
    # NOTE(review): unlike the other SSE loader, this covariance is NOT
    # symmetrized (no M + M^T - diag(M)) — confirm the CSV holds the full
    # matrix rather than only the upper triangle
    cov = np.array([[ cov_dict.get((k,m),0) for m in keys_sorted] for k in keys_sorted])
    parameter_names = [implementation_name + ' ' + translate_parameters(coeff_name) for coeff_name in keys_sorted]
    for parameter_name in parameter_names:
        try: # check if parameter object already exists
            p = Parameter[parameter_name]
        # fix: replace bare `except:` with KeyError, the exception raised
        # by a missing item lookup; a bare except would mask real bugs
        except KeyError: # otherwise, create a new one
            p = Parameter(parameter_name)
            # LaTeX representation of coefficient and form factor names
            _tex_a = tex_a[parameter_name.split(' ')[-1].split('_')[0]]
            _tex_ff = tex_ff[parameter_name.split(' ')[-1].split('_')[-1]]
            p.tex = r'$' + _tex_a + r'^{' + _tex_ff + r'}$'
            p.description = r'SSE form factor parametrization coefficient $' + _tex_a + r'$ of $' + _tex_ff + r'$'
        else: # if parameter exists, remove existing constraints
            constraints.remove_constraint(parameter_name)
    constraints.add_constraint(parameter_names,
            MultivariateNormalDistribution(central_value=res, covariance=cov ))
# Example #6
    def test_profiler(self):
        """Test 1D and 2D likelihood profilers on a toy fit built from
        dummy parameters, one dummy observable and one dummy measurement."""
        # defining some dummy parameters and observables
        Parameter('tmp a')
        Parameter('tmp b')
        Parameter('tmp c')
        Parameter('tmp d')
        p = ParameterConstraints()
        p.set_constraint('tmp b', '2+-0.3')
        p.set_constraint('tmp c', '0.2+-0.1')
        p.set_constraint('tmp d', '1+-0.5')

        def prediction(wc_obj, par):
            return par['tmp a']**2 + par['tmp b'] + par['tmp c'] + par[
                'tmp d']**2

        flavio.Observable('tmp obs')
        Prediction('tmp obs', prediction)
        m = Measurement('tmp measurement')
        m.add_constraint(['tmp obs'],
                         flavio.statistics.probability.NormalDistribution(
                             1, 0.2))
        # test 1D profiler
        fit_1d = FrequentistFit('test profiler 1d', p, ['tmp a'],
                                ['tmp b', 'tmp c', 'tmp d'], ['tmp obs'])
        profiler_1d = profiler.Profiler1D(fit_1d, -10, 10)
        x, z, n = profiler_1d.run(steps=4)
        self.assertEqual(x.shape, (4, ))
        self.assertEqual(z.shape, (4, ))
        self.assertEqual(n.shape, (3, 4))
        npt.assert_array_equal(x, profiler_1d.x)
        npt.assert_array_equal(z, profiler_1d.log_profile_likelihood)
        npt.assert_array_equal(n, profiler_1d.profile_nuisance)
        pdat = profiler_1d.pvalue_prob_plotdata()
        npt.assert_array_equal(pdat['x'], x)
        # test 2D profiler
        # fix: the dummy parameter is named 'tmp d', not 'd'; its constraint
        # must be dropped because it becomes a fit parameter in the 2D fit
        p.remove_constraint('tmp d')
        fit_2d = FrequentistFit('test profiler 2d', p, ['tmp a', 'tmp d'],
                                ['tmp b', 'tmp c'], ['tmp obs'])
        profiler_2d = profiler.Profiler2D(fit_2d, -10, 10, -10, 10)
        x, y, z, n = profiler_2d.run(steps=(3, 4))
        self.assertEqual(x.shape, (3, ))
        self.assertEqual(y.shape, (4, ))
        self.assertEqual(z.shape, (3, 4))
        self.assertEqual(n.shape, (2, 3, 4))
        npt.assert_array_equal(x, profiler_2d.x)
        npt.assert_array_equal(y, profiler_2d.y)
        npt.assert_array_equal(z, profiler_2d.log_profile_likelihood)
        npt.assert_array_equal(n, profiler_2d.profile_nuisance)
        pdat = profiler_2d.contour_plotdata()
        npt.assert_array_almost_equal(pdat['z'], -2 * (z - np.max(z)))
        # delete dummy instances; use a distinct loop name so the
        # ParameterConstraints instance `p` is not shadowed
        for par_name in ['tmp a', 'tmp b', 'tmp c', 'tmp d']:
            Parameter.del_instance(par_name)
        FrequentistFit.del_instance('test profiler 1d')
        # fix: the 2D fit instance was created but never deleted, leaking
        # state into subsequent tests
        FrequentistFit.del_instance('test profiler 2d')
        Observable.del_instance('tmp obs')
        Measurement.del_instance('tmp measurement')
# Example #7
def load_parameters(filename, constraints):
    """Load form factor parameters from a YAML data file inside the
    flavio.physics package and add them to `constraints` as a
    multivariate Gaussian.
    """
    f = pkgutil.get_data('flavio.physics', filename)
    # fix: yaml.load without an explicit Loader is deprecated and unsafe;
    # safe_load is sufficient for plain data files
    ff_dict = yaml.safe_load(f)
    for parameter_name in ff_dict['parameters']:
        try: # check if parameter object already exists
            p = Parameter.get_instance(parameter_name)
        # fix: narrow the bare `except:` to KeyError so unrelated errors
        # are not silently swallowed
        except KeyError: # otherwise, create a new one
            p = Parameter(parameter_name)
        else: # if parameter exists, remove existing constraints
            # NOTE(review): other variants in this file call
            # remove_constraint (singular) — confirm which API is correct
            constraints.remove_constraints(parameter_name)
    covariance = np.outer(ff_dict['uncertainties'], ff_dict['uncertainties'])*ff_dict['correlation']
    if not np.allclose(covariance, covariance.T):
        # if the covariance is not symmetric, it is assumed that only the values above the diagonal are present.
        # then: M -> M + M^T - diag(M)
        covariance = covariance + covariance.T - np.diag(np.diag(covariance))
    constraints.add_constraint(ff_dict['parameters'],
            MultivariateNormalDistribution(central_value=ff_dict['central_values'], covariance=covariance) )
# Example #8
def load_parameters(file_res, file_cov, process, constraints):
    """Load SSE form factor central values and covariance from CSV files
    and add them to `constraints` as a multivariate Gaussian.
    """
    implementation_name = process + ' SSE'
    res_dict = csv_to_dict(file_res)
    cov_dict = csv_to_dict(file_cov)
    keys_sorted = sorted(res_dict.keys())
    res = [res_dict[k] for k in keys_sorted]
    # M -> M + M^T - diag(M) since the dictionary contains only the entries above the diagonal
    cov = ( np.array([[ cov_dict.get((k,m),0) for m in keys_sorted] for k in keys_sorted])
          + np.array([[ cov_dict.get((m,k),0) for m in keys_sorted] for k in keys_sorted])
          - np.diag([ cov_dict[(k,k)] for k in keys_sorted]) )
    parameter_names = [implementation_name + ' ' + coeff_name for coeff_name in keys_sorted]
    for parameter_name in parameter_names:
        try: # check if parameter object already exists
            p = Parameter.get_instance(parameter_name)
        # fix: narrow the bare `except:` to KeyError (raised by a missing
        # instance lookup) so real errors propagate
        except KeyError: # otherwise, create a new one
            p = Parameter(parameter_name)
        else: # if parameter exists, remove existing constraints
            # NOTE(review): other variants in this file call
            # remove_constraint (singular) — confirm which API is correct
            constraints.remove_constraints(parameter_name)
    constraints.add_constraint(parameter_names,
            MultivariateNormalDistribution(central_value=res, covariance=cov ))
# Example #9
import flavio
import flavio.statistics.fits
from flavio.physics.running.running import get_alpha
from flavio.physics import ckm
from flavio.classes import Observable, Prediction, Measurement, Parameter
from flavio.statistics.probability import NormalDistribution
from flavio.statistics.functions import pull
import yaml
from iminuit import Minuit
import numpy as np

# central values of all default parameters, used by the prediction below
par = flavio.default_parameters.get_central_all()
GF = par['GF']
# fix: `sqrt` was never imported (NameError); use numpy's sqrt instead
sq2 = np.sqrt(2)

# register Delta M_S as a parameter with its measured value as constraint
DMs_SM = Parameter('Delta M_S')
flavio.default_parameters.set_constraint('Delta M_S', '20.01 ± 1.25')


def myDeltaMS(wc_obj, par):
    """Prediction for Delta M_S: the SM value from `par`, rescaled by the
    contribution of the CVLL_bsbs Wilson coefficient when one is present."""
    sm_value = par['Delta M_S']
    if wc_obj.wc is None:
        # no Wilson coefficients set: plain SM prediction
        return sm_value
    prefactor = -sq2 / (4 * GF * ckm.xi('t', 'bs')(par)**2)
    Cbs = prefactor * wc_obj.wc['CVLL_bsbs']
    return sm_value * abs(1 + Cbs / (1.3397e-3))


# register the observable and attach the custom prediction function above
Observable('DMs')
Prediction('DMs', myDeltaMS)
# Example #10
 def test_profiler(self):
     """Test 1D and 2D likelihood profilers, including the multiprocessing
     code path, on a toy fit built from dummy parameters and observables."""
     # defining some dummy parameters and observables
     Parameter('tmp a')
     Parameter('tmp b')
     Parameter('tmp c')
     Parameter('tmp d')
     p = ParameterConstraints()
     p.set_constraint('tmp b', '2+-0.3')
     p.set_constraint('tmp c', '0.2+-0.1')
     p.set_constraint('tmp d', '1+-0.5')

     def prediction(wc_obj, par):
         return par['tmp a']**2+par['tmp b']+par['tmp c']+par['tmp d']**2

     flavio.Observable('tmp obs')
     Prediction('tmp obs', prediction)
     m = Measurement('tmp measurement')
     m.add_constraint(['tmp obs'],
                 flavio.statistics.probability.NormalDistribution(1, 0.2))
     # test 1D profiler
     fit_1d = FrequentistFit('test profiler 1d',
                                 p, ['tmp a'], ['tmp b', 'tmp c', 'tmp d'], ['tmp obs'])
     profiler_1d = profiler.Profiler1D(fit_1d, -10, 10)
     x, z, n = profiler_1d.run(steps=4)
     self.assertEqual(x.shape, (4,))
     self.assertEqual(z.shape, (4,))
     self.assertEqual(n.shape, (3, 4))
     npt.assert_array_equal(x, profiler_1d.x)
     npt.assert_array_equal(z, profiler_1d.log_profile_likelihood)
     npt.assert_array_equal(n, profiler_1d.profile_nuisance)
     pdat = profiler_1d.pvalue_prob_plotdata()
     npt.assert_array_equal(pdat['x'], x)
     # test multiprocessing: results must agree with the serial run
     for threads in [2, 3, 4]:
         xt, zt, nt = profiler_1d.run(steps=4, threads=threads)
         npt.assert_array_almost_equal(x, xt, decimal=4)
         npt.assert_array_almost_equal(z, zt, decimal=4)
         npt.assert_array_almost_equal(n, nt, decimal=4)
     with self.assertRaises(ValueError):
         profiler_1d.run(steps=4, threads=5)
     # test 2D profiler
     # fix: the dummy parameter is named 'tmp d', not 'd'; its constraint
     # must be dropped because it becomes a fit parameter in the 2D fit
     p.remove_constraint('tmp d')
     fit_2d = FrequentistFit('test profiler 2d',
                                 p, ['tmp a', 'tmp d'], ['tmp b', 'tmp c'], ['tmp obs'])
     profiler_2d = profiler.Profiler2D(fit_2d, -10, 10, -10, 10)
     x, y, z, n = profiler_2d.run(steps=(3, 4))
     self.assertEqual(x.shape, (3,))
     self.assertEqual(y.shape, (4,))
     self.assertEqual(z.shape, (3, 4))
     self.assertEqual(n.shape, (2, 3, 4))
     npt.assert_array_equal(x, profiler_2d.x)
     npt.assert_array_equal(y, profiler_2d.y)
     npt.assert_array_equal(z, profiler_2d.log_profile_likelihood)
     npt.assert_array_equal(n, profiler_2d.profile_nuisance)
     pdat = profiler_2d.contour_plotdata()
     npt.assert_array_almost_equal(pdat['z'], -2*(z-np.max(z)))
     # test multiprocessing
     for threads in [2, 5, 12]:
         # fix: `threads=threads` was not being passed, so the serial code
         # path was silently re-tested for every thread count
         xt, yt, zt, nt = profiler_2d.run(steps=(3, 4), threads=threads)
         npt.assert_array_almost_equal(x, xt, decimal=4)
         npt.assert_array_almost_equal(y, yt, decimal=4)
         npt.assert_array_almost_equal(z, zt, decimal=4)
         npt.assert_array_almost_equal(n, nt, decimal=4)
     with self.assertRaises(ValueError):
         profiler_2d.run(steps=(3, 4), threads=13)
     # delete dummy instances; use a distinct loop name so the
     # ParameterConstraints instance `p` is not shadowed
     for par_name in ['tmp a', 'tmp b', 'tmp c', 'tmp d']:
         Parameter.del_instance(par_name)
     FrequentistFit.del_instance('test profiler 1d')
     # fix: the 2D fit instance was created but never deleted, leaking
     # state into subsequent tests
     FrequentistFit.del_instance('test profiler 2d')
     Observable.del_instance('tmp obs')
     Measurement.del_instance('tmp measurement')