Example #1
def test_jax_jit_enable_stitching(caplog, do_grad, return_fitted_val):
    pyhf.set_backend(pyhf.tensor.jax_backend(precision='64b'), 'scipy')
    pdf = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + pdf.config.auxdata)

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fixed_poi_fit(
            1.0,
            data,
            pdf,
            do_grad=do_grad,
            do_stitch=False,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' in caplog.text
        caplog.clear()

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fixed_poi_fit(
            1.0,
            data,
            pdf,
            do_grad=do_grad,
            do_stitch=True,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' in caplog.text
        caplog.clear()
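The snippet above is shown without its imports or parametrization. A minimal sketch of the scaffolding it presumably relies on (the parametrize values are illustrative assumptions, not taken from the original suite):

# Hypothetical test scaffolding for the example above; the real suite's
# parametrize values and fixtures may differ.
import logging

import pytest

import pyhf


@pytest.mark.parametrize("return_fitted_val", [False, True])
@pytest.mark.parametrize("do_grad", [False, True])
def test_jax_jit_enable_stitching(caplog, do_grad, return_fitted_val):
    ...  # body as shown above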
Example #2
def test_interpcode(backend, interpcode, random_histosets_alphasets_pair):
    pyhf.set_backend(backend)
    if isinstance(pyhf.tensorlib, pyhf.tensor.tensorflow_backend):
        tf.reset_default_graph()
        pyhf.tensorlib.session = tf.Session()

    histogramssets, alphasets = random_histosets_alphasets_pair

    # for single-float precision backends, calculate using single floats
    if isinstance(
        backend, (pyhf.tensor.tensorflow_backend, pyhf.tensor.pytorch_backend)
    ):
        histogramssets = np.asarray(histogramssets, dtype=np.float32)
        alphasets = np.asarray(alphasets, dtype=np.float32)

    slow_result = np.asarray(
        pyhf.tensorlib.tolist(
            pyhf.interpolate.interpolator(interpcode, do_tensorized_calc=False)(
                histogramssets=histogramssets, alphasets=alphasets
            )
        )
    )
    fast_result = np.asarray(
        pyhf.tensorlib.tolist(
            pyhf.interpolate.interpolator(interpcode, do_tensorized_calc=True)(
                histogramssets=pyhf.tensorlib.astensor(histogramssets.tolist()),
                alphasets=pyhf.tensorlib.astensor(alphasets.tolist()),
            )
        )
    )

    assert (
        pytest.approx(slow_result[~np.isnan(slow_result)].ravel().tolist())
        == fast_result[~np.isnan(fast_result)].ravel().tolist()
    )
Example #3
def _fit_model_pyhf(
    model: pyhf.pdf.Model,
    data: List[float],
    init_pars: Optional[List[float]] = None,
    fix_pars: Optional[List[bool]] = None,
    minos: Optional[Union[List[str], Tuple[str, ...]]] = None,
) -> FitResults:
    """Uses the ``pyhf.infer`` API to perform a maximum likelihood fit.

    Parameters set to be fixed in the model are held constant. The ``init_pars``
    argument can be used to override the ``pyhf`` default initial parameter settings,
    and the ``fix_pars`` argument overrides which parameters are held constant.

    Args:
        model (pyhf.pdf.Model): the model to use in the fit
        data (List[float]): the data to fit the model to
        init_pars (Optional[List[float]], optional): list of initial parameter settings,
            defaults to None (use ``pyhf`` suggested inits)
        fix_pars (Optional[List[bool]], optional): list of booleans specifying which
            parameters are held constant, defaults to None (use ``pyhf`` suggestion)
        minos (Optional[Union[List[str], Tuple[str, ...]]], optional): runs the MINOS
            algorithm for all parameters specified, defaults to None (does not run
            MINOS)

    Returns:
        FitResults: object storing relevant fit results
    """
    pyhf.set_backend("numpy", pyhf.optimize.minuit_optimizer(verbose=1))

    result, result_obj = pyhf.infer.mle.fit(
        data,
        model,
        init_pars=init_pars,
        fixed_params=fix_pars,
        return_uncertainties=True,
        return_result_obj=True,
    )
    log.info(f"MINUIT status:\n{result_obj.minuit.fmin}")

    bestfit = result[:, 0]
    uncertainty = result[:, 1]
    labels = model_utils.get_parameter_names(model)
    corr_mat = result_obj.hess_inv.correlation()
    best_twice_nll = float(result_obj.fun)  # convert 0-dim np.ndarray to float

    fit_results = FitResults(bestfit, uncertainty, labels, corr_mat,
                             best_twice_nll)

    if minos is not None:
        parameters_translated = []
        for minos_par in minos:
            par_index = model_utils._get_parameter_index(minos_par, labels)
            if par_index != -1:
                # pyhf does not hand over parameter names, all parameters are known as
                # x0, x1, etc.
                parameters_translated.append(f"x{par_index}")

        _run_minos(result_obj.minuit, parameters_translated, labels)

    return fit_results
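A usage sketch for the helper above, assuming it is imported from a module that also provides the ``FitResults`` container and ``model_utils`` helpers it references; the printed attribute names mirror the ``FitResults`` constructor call in the body:

# Usage sketch (assumed module layout): build a toy model and fit it.
import pyhf

model = pyhf.simplemodels.uncorrelated_background(
    signal=[10.0], bkg=[50.0], bkg_uncertainty=[7.0]
)
data = [55.0] + model.config.auxdata

fit_results = _fit_model_pyhf(model, data, minos=["mu"])
print(fit_results.bestfit, fit_results.uncertainty)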
Example #4
def test_interpcode_1(backend, do_tensorized_calc):
    pyhf.set_backend(backend)
    if isinstance(pyhf.tensorlib, pyhf.tensor.tensorflow_backend):
        tf.reset_default_graph()
        pyhf.tensorlib.session = tf.Session()

    histogramssets = pyhf.tensorlib.astensor([[[[0.9], [1.0], [1.1]]]])
    alphasets = pyhf.tensorlib.astensor([[-2, -1, 0, 1, 2]])
    expected = pyhf.tensorlib.astensor(
        [[[[0.9**2], [0.9], [1.0], [1.1], [1.1**2]]]]
    )

    if do_tensorized_calc:
        result_deltas = pyhf.interpolate.interpolator(
            1, do_tensorized_calc=do_tensorized_calc
        )(histogramssets, alphasets)
    else:
        result_deltas = pyhf.tensorlib.astensor(
            pyhf.interpolate.interpolator(1, do_tensorized_calc=do_tensorized_calc)(
                pyhf.tensorlib.tolist(histogramssets),
                pyhf.tensorlib.tolist(alphasets),
            )
        )

    # calculate the actual change
    allsets_allhistos_noms_repeated = pyhf.tensorlib.einsum(
        'sa,shb->shab',
        pyhf.tensorlib.ones(alphasets.shape),
        histogramssets[:, :, 1],
    )
    results = allsets_allhistos_noms_repeated * result_deltas

    assert (
        pytest.approx(np.asarray(pyhf.tensorlib.tolist(results)).ravel().tolist())
        == np.asarray(pyhf.tensorlib.tolist(expected)).ravel().tolist()
    )
Example #5
def main(backend):
    """
    Example of fitting a public workspace from HEPData. The fit selects a single
    signal point from the signal patchset and then patches the background only
    model to create a workspace.
    """
    pyhf.set_backend(backend)
    print(f"Backend set to: {backend}")

    # Use the generator so cleanup happens automatically
    tarfile = [tgz for tgz in electroweakinos_likelihoods_download()][0]
    oneLbb_background, oneLbb_Wh_hbb_750_100_signal_patch = get_bkg_and_signal(
        tarfile,
        "1Lbb-likelihoods-hepdata",
        (750, 100),  # "C1N2_Wh_hbb_750_100(750, 100)"
    )

    print("\nStarting fit\n")
    fit_start_time = datetime.now()
    CLs_obs, CLs_exp = calculate_CLs(oneLbb_background,
                                     oneLbb_Wh_hbb_750_100_signal_patch)
    fit_end_time = datetime.now()
    fit_time = fit_end_time - fit_start_time

    print(f"fit C1N2_Wh_hbb_750_100 in {fit_time.total_seconds():.2f} seconds\n")
    print(f"CLs_obs: {CLs_obs}")
    print(f"CLs_exp: {CLs_exp}")
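A hypothetical entry point for running the example; the backend string below is illustrative only.

# Hypothetical entry point; any installed pyhf backend name could be passed.
if __name__ == "__main__":
    main("numpy")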
Example #6
def test_diffable_backend(jitted):
    pyhf.set_backend("jax", default=True)

    def example_op(x):
        y = pyhf.default_backend.astensor(x)
        return 2 * y

    if jitted:
        assert jax.jacrev(jax.jit(example_op))([1.0]) == [2.0]
    else:
        assert jax.jacrev(example_op)([1.0]) == [2.0]

    def example_op2(x):
        y = pyhf.default_backend.power(x, 2)
        z = pyhf.tensorlib.sum(y)
        return z

    if jitted:
        assert jax.jacrev(jax.jit(example_op2))(
            pyhf.tensorlib.astensor([2.0, 3.0])
        ).tolist() == [4.0, 6.0]
    else:
        assert jax.jacrev(example_op2)(
            pyhf.tensorlib.astensor([2.0, 3.0])
        ).tolist() == [4.0, 6.0]
Example #7
def infer_hypotest(workspace, metadata, patches, backend):
    import time

    import pyhf

    pyhf.set_backend(backend)

    tick = time.time()
    model = workspace.model(
        patches=patches,
        modifier_settings={
            "normsys": {"interpcode": "code4"},
            "histosys": {"interpcode": "code4p"},
        },
    )
    data = workspace.data(model)
    test_poi = 1.0
    return {
        "metadata": metadata,
        "CLs_obs": float(
            pyhf.infer.hypotest(test_poi, data, model, test_stat="qtilde")
        ),
        "Fit-Time": time.time() - tick,
    }
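A hypothetical driver for ``infer_hypotest``; the file name, metadata, and empty patch list below are placeholders rather than part of the original example.

# Hypothetical driver; "bkgonly.json" is a placeholder workspace file.
import json

import pyhf

with open("bkgonly.json") as spec_file:
    workspace = pyhf.Workspace(json.load(spec_file))

result = infer_hypotest(
    workspace, metadata={"name": "example"}, patches=[], backend="numpy"
)
print(result["CLs_obs"], result["Fit-Time"])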
Example #8
def reset_backend():
    """
    This fixture is automatically run to reset the backend before and after a test function runs.
    """
    pyhf.set_backend(pyhf.default_backend)
    yield reset_backend
    pyhf.set_backend(pyhf.default_backend)
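The docstring says the fixture runs automatically, which suggests an ``autouse`` registration in ``conftest.py``; a sketch under that assumption:

# Sketch of a conftest.py registration; autouse=True is an assumption based
# on the docstring ("automatically run"), not shown in the snippet above.
import pytest

import pyhf


@pytest.fixture(autouse=True)
def reset_backend():
    """Reset the backend before and after each test function."""
    pyhf.set_backend(pyhf.default_backend)
    yield
    pyhf.set_backend(pyhf.default_backend)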
Example #9
def test_minuit_failed_optimization(monkeypatch, mocker,
                                    has_reached_call_limit, is_above_max_edm):
    class BadMinuit(iminuit.Minuit):
        @property
        def valid(self):
            return False

        @property
        def fmin(self):
            mock = mocker.MagicMock()
            mock.has_reached_call_limit = has_reached_call_limit
            mock.is_above_max_edm = is_above_max_edm
            return mock

    monkeypatch.setattr(iminuit, 'Minuit', BadMinuit)
    pyhf.set_backend('numpy', 'minuit')
    pdf = pyhf.simplemodels.hepdata_like([5], [10], [3.5])
    data = [10] + pdf.config.auxdata
    spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize')
    with pytest.raises(pyhf.exceptions.FailedMinimization) as excinfo:
        pyhf.infer.mle.fit(data, pdf)

    assert isinstance(excinfo.value.result, OptimizeResult)

    assert excinfo.match('Optimization failed')
    assert 'Optimization failed' in spy.spy_return.message
    if has_reached_call_limit:
        assert excinfo.match('Call limit was reached')
        assert 'Call limit was reached' in spy.spy_return.message
    if is_above_max_edm:
        assert excinfo.match('Estimated distance to minimum too large')
        assert 'Estimated distance to minimum too large' in spy.spy_return.message
Example #10
def test_tensorlib_setup(tensorlib, precision, mocker):
    tb = getattr(pyhf.tensor, tensorlib)(precision=precision)

    func = mocker.patch(f'pyhf.tensor.{tensorlib}._setup')
    assert func.call_count == 0
    pyhf.set_backend(tb)
    assert func.call_count == 1
Example #11
def test_custom_optimizer_name_supported():
    class custom_optimizer(object):
        def __init__(self, **kwargs):
            self.name = "scipy"

    with pytest.raises(AttributeError):
        pyhf.set_backend(pyhf.tensorlib, custom_optimizer())
Example #12
def test_jax_jit_enable_autograd(caplog, do_stitch, return_fitted_val):
    pyhf.set_backend("jax", "scipy", precision="64b")
    pdf = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + pdf.config.auxdata)

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fixed_poi_fit(
            1.0,
            data,
            pdf,
            do_grad=False,
            do_stitch=do_stitch,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' in caplog.text
        caplog.clear()

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fixed_poi_fit(
            1.0,
            data,
            pdf,
            do_grad=True,
            do_stitch=do_stitch,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' in caplog.text
        caplog.clear()
Example #13
def test_set_backend_by_bytestring(backend_name):
    pyhf.set_backend(backend_name)
    assert isinstance(
        pyhf.tensorlib,
        getattr(pyhf.tensor,
                "{0:s}_backend".format(backend_name.decode("utf-8"))),
    )
Example #14
def test_set_optimizer_by_bytestring(optimizer_name):
    pyhf.set_backend(pyhf.tensorlib, optimizer_name)
    assert isinstance(
        pyhf.optimizer,
        getattr(pyhf.optimize,
                "{0:s}_optimizer".format(optimizer_name.decode("utf-8"))),
    )
Example #15
def test_set_precision_by_string_wins(precision_level):
    conflicting_precision = "32b" if precision_level == "64b" else "64b"
    pyhf.set_backend(
        pyhf.tensor.numpy_backend(precision=conflicting_precision),
        precision=precision_level,
    )
    assert pyhf.tensorlib.precision == precision_level.lower()
Example #16
def significance(model: pyhf.pdf.Model, data: List[float]) -> SignificanceResults:
    """Calculates the discovery significance of a positive signal.

    Observed and expected p-values and significances are both calculated and reported.

    Args:
        model (pyhf.pdf.Model): model to use in fits
        data (List[float]): data (including auxdata) the model is fit to

    Returns:
        SignificanceResults: object storing the observed and expected p-values
            and significances
    """
    pyhf.set_backend(pyhf.tensorlib, pyhf.optimize.minuit_optimizer(verbose=1))

    log.info("calculating discovery significance")
    obs_p_val, exp_p_val = pyhf.infer.hypotest(
        0.0, data, model, test_stat="q0", return_expected=True
    )
    obs_p_val = float(obs_p_val)
    exp_p_val = float(exp_p_val)
    obs_significance = scipy.stats.norm.isf(obs_p_val, 0, 1)
    exp_significance = scipy.stats.norm.isf(exp_p_val, 0, 1)

    log.info(f"observed p-value: {obs_p_val:.8%}")
    log.info(f"observed significance: {obs_significance:.3f}")
    log.info(f"expected p-value: {exp_p_val:.8%}")
    log.info(f"expected significance: {exp_significance:.3f}")

    significance_results = SignificanceResults(
        obs_p_val, obs_significance, exp_p_val, exp_significance
    )
    return significance_results
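A minimal sketch of calling ``significance`` on a toy model, assuming ``SignificanceResults`` and the module-level ``log`` are importable alongside it:

# Usage sketch: a toy model with a signal-like excess in the observed data.
import pyhf

model = pyhf.simplemodels.uncorrelated_background(
    signal=[20.0], bkg=[50.0], bkg_uncertainty=[5.0]
)
data = [75.0] + model.config.auxdata

results = significance(model, data)
print(results)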
Example #17
def test_set_optimizer_by_bytestring(optimizer_name):
    pyhf.set_backend(pyhf.tensorlib, optimizer_name)
    assert isinstance(
        pyhf.optimizer,
        getattr(pyhf.optimize,
                f"{optimizer_name.decode('utf-8'):s}_optimizer"),
    )
Example #18
def test_pdf_eval():
    tf_sess = tf.Session()
    backends = [
        numpy_backend(poisson_from_normal=True),
        pytorch_backend(),
        tensorflow_backend(session=tf_sess),
        mxnet_backend()
    ]

    values = []
    for b in backends:
        pyhf.set_backend(b)

        source = {
            "binning": [2, -0.5, 1.5],
            "bindata": {
                "data": [120.0, 180.0],
                "bkg": [100.0, 150.0],
                "bkgsys_up": [102, 190],
                "bkgsys_dn": [98, 100],
                "sig": [30.0, 95.0]
            }
        }
        spec = {
            'channels': [
                {
                    'name': 'singlechannel',
                    'samples': [
                        {
                            'name': 'signal',
                            'data': source['bindata']['sig'],
                            'modifiers': [
                                {'name': 'mu', 'type': 'normfactor', 'data': None}
                            ],
                        },
                        {
                            'name': 'background',
                            'data': source['bindata']['bkg'],
                            'modifiers': [
                                {
                                    'name': 'bkg_norm',
                                    'type': 'histosys',
                                    'data': {
                                        'lo_data': source['bindata']['bkgsys_dn'],
                                        'hi_data': source['bindata']['bkgsys_up'],
                                    },
                                }
                            ],
                        },
                    ],
                }
            ],
        }
        pdf = pyhf.hfpdf(spec)
        data = source['bindata']['data'] + pdf.config.auxdata

        v1 = pdf.logpdf(pdf.config.suggested_init(), data)
        values.append(pyhf.tensorlib.tolist(v1)[0])

    assert np.std(values) < 1e-6
Example #19
def test_minuit_strategy_do_grad(mocker, backend):
    """
    ref: gh#1172

    When there is a user-provided gradient, check that one automatically sets
    the minuit strategy=0. When there is no user-provided gradient, check that
    one automatically sets the minuit strategy=1.
    """
    pyhf.set_backend(pyhf.tensorlib, 'minuit')
    spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize')
    m = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata)

    do_grad = pyhf.tensorlib.default_do_grad
    pyhf.infer.mle.fit(data, m)
    assert spy.call_count == 1
    assert spy.spy_return.minuit.strategy == int(not do_grad)

    pyhf.infer.mle.fit(data, m, strategy=0)
    assert spy.call_count == 2
    assert spy.spy_return.minuit.strategy == 0

    pyhf.infer.mle.fit(data, m, strategy=1)
    assert spy.call_count == 3
    assert spy.spy_return.minuit.strategy == 1
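Besides the per-call ``strategy`` argument exercised above, the strategy can also be pinned when configuring the backend, assuming the minuit optimizer constructor accepts a ``strategy`` keyword in the pyhf version in use; a hedged sketch:

# Sketch only: the strategy keyword on minuit_optimizer is assumed to be
# available in the installed pyhf version.
import pyhf

pyhf.set_backend("numpy", pyhf.optimize.minuit_optimizer(strategy=2))

model = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata)
pyhf.infer.mle.fit(data, model)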
Example #20
def test_pdf_eval_2():
    tf_sess = tf.Session()
    backends = [
        numpy_backend(poisson_from_normal=True),
        pytorch_backend(),
        tensorflow_backend(session=tf_sess),
        mxnet_backend()
    ]

    values = []
    for b in backends:
        pyhf.set_backend(b)

        source = {
            "binning": [2, -0.5, 1.5],
            "bindata": {
                "data": [120.0, 180.0],
                "bkg": [100.0, 150.0],
                "bkgerr": [10.0, 10.0],
                "sig": [30.0, 95.0]
            }
        }

        pdf = hepdata_like(source['bindata']['sig'], source['bindata']['bkg'],
                           source['bindata']['bkgerr'])
        data = source['bindata']['data'] + pdf.config.auxdata

        v1 = pdf.logpdf(pdf.config.suggested_init(), data)
        values.append(pyhf.tensorlib.tolist(v1)[0])

    assert np.std(values) < 1e-6
Example #21
def test_optimizer_unsupported_minimizer_options(optimizer):
    pyhf.set_backend(pyhf.default_backend, optimizer())
    m = pyhf.simplemodels.hepdata_like([5.0], [10.0], [3.5])
    data = pyhf.tensorlib.astensor([10.0] + m.config.auxdata)
    with pytest.raises(pyhf.exceptions.Unsupported) as excinfo:
        pyhf.infer.mle.fit(data, m, unsupported_minimizer_options=False)
    assert 'unsupported_minimizer_options' in str(excinfo.value)
Example #22
def test_custom_backend_name_supported():
    class custom_backend(object):
        def __init__(self, **kwargs):
            self.name = "pytorch"

    with pytest.raises(AttributeError):
        pyhf.set_backend(custom_backend())
Example #23
def test_minimize_do_grad_autoconfig(mocker, backend, backend_new):
    backend, do_grad = backend
    backend_new, do_grad_new = backend_new

    # patch all we need
    from pyhf.optimize import mixins

    shim = mocker.patch.object(mixins,
                               'shim',
                               return_value=({}, lambda x: True))
    mocker.patch.object(OptimizerMixin, '_internal_minimize')
    mocker.patch.object(OptimizerMixin, '_internal_postprocess')

    # start with first backend
    pyhf.set_backend(backend, 'scipy')
    m = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata)

    assert pyhf.tensorlib.default_do_grad == do_grad
    pyhf.infer.mle.fit(data, m)
    assert shim.call_args[1]['do_grad'] == pyhf.tensorlib.default_do_grad
    pyhf.infer.mle.fit(data, m, do_grad=not (pyhf.tensorlib.default_do_grad))
    assert shim.call_args[1]['do_grad'] != pyhf.tensorlib.default_do_grad

    # now switch to new backend and see what happens
    pyhf.set_backend(backend_new)
    m = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata)

    assert pyhf.tensorlib.default_do_grad == do_grad_new
    pyhf.infer.mle.fit(data, m)
    assert shim.call_args[1]['do_grad'] == pyhf.tensorlib.default_do_grad
    pyhf.infer.mle.fit(data, m, do_grad=not (pyhf.tensorlib.default_do_grad))
    assert shim.call_args[1]['do_grad'] != pyhf.tensorlib.default_do_grad
Example #24
def test_jax_jit(caplog, optimizer, do_grad, do_stitch, return_fitted_val):
    pyhf.set_backend("jax", optimizer, precision="64b")
    pdf = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + pdf.config.auxdata)

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fixed_poi_fit(
            1.0,
            data,
            pdf,
            do_grad=do_grad,
            do_stitch=do_stitch,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' in caplog.text
        caplog.clear()

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fixed_poi_fit(
            2.0,
            data,
            pdf,
            do_grad=do_grad,
            do_stitch=do_stitch,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' not in caplog.text

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fit(
            data,
            pdf,
            do_grad=do_grad,
            do_stitch=do_stitch,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' in caplog.text
        caplog.clear()

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fit(
            data,
            pdf,
            do_grad=do_grad,
            do_stitch=do_stitch,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' not in caplog.text

    with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'):
        pyhf.infer.mle.fixed_poi_fit(
            3.0,
            data,
            pdf,
            do_grad=do_grad,
            do_stitch=do_stitch,
            return_fitted_val=return_fitted_val,
        )  # jit
        assert 'jitting function' not in caplog.text
Example #25
def test_solver_options_scipy(mocker):
    optimizer = pyhf.optimize.scipy_optimizer(solver_options={'ftol': 1e-5})
    pyhf.set_backend('numpy', optimizer)
    assert pyhf.optimizer.solver_options == {'ftol': 1e-5}

    model = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata)
    assert pyhf.infer.mle.fit(data, model).tolist()
Example #26
def test_pdf_integration_normsys(backend):
    pyhf.set_backend(backend)
    if isinstance(pyhf.tensorlib, pyhf.tensor.tensorflow_backend):
        tf.reset_default_graph()
        pyhf.tensorlib.session = tf.Session()
    schema = json.load(open('validation/spec.json'))
    source = json.load(open('validation/data/2bin_histosys_example2.json'))
    spec = {
        'channels': [
            {
                'name': 'singlechannel',
                'samples': [
                    {
                        'name': 'signal',
                        'data': source['bindata']['sig'],
                        'modifiers': [
                            {'name': 'mu', 'type': 'normfactor', 'data': None}
                        ],
                    },
                    {
                        'name': 'background',
                        'data': source['bindata']['bkg'],
                        'modifiers': [
                            {
                                'name': 'bkg_norm',
                                'type': 'normsys',
                                'data': {'lo': 0.9, 'hi': 1.1},
                            }
                        ],
                    },
                ],
            }
        ],
    }
    jsonschema.validate(spec, schema)
    pdf = pyhf.hfpdf(spec)

    pars = [None, None]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [0.0]
    assert np.allclose(
        pyhf.tensorlib.tolist(pdf.expected_data(pars, include_auxdata=False)),
        [100, 150],
    )

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.0]
    assert np.allclose(
        pyhf.tensorlib.tolist(pdf.expected_data(pars, include_auxdata=False)),
        [100 * 1.1, 150 * 1.1],
    )

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [-1.0]
    assert np.allclose(
        pyhf.tensorlib.tolist(pdf.expected_data(pars, include_auxdata=False)),
        [100 * 0.9, 150 * 0.9],
    )
Example #27
def test_custom_optimizer_name_notsupported():
    class custom_optimizer(object):
        def __init__(self, **kwargs):
            self.name = "notsupported"

    optimizer = custom_optimizer()
    assert pyhf.optimizer.name != optimizer.name
    pyhf.set_backend(pyhf.tensorlib, optimizer)
    assert pyhf.optimizer.name == optimizer.name
Example #28
def test_bad_solver_options_scipy(mocker):
    optimizer = pyhf.optimize.scipy_optimizer(
        solver_options={'arbitrary_option': 'foobar'})
    pyhf.set_backend('numpy', optimizer)
    assert pyhf.optimizer.solver_options == {'arbitrary_option': 'foobar'}

    model = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata)
    assert pyhf.infer.mle.fit(data, model).tolist()
Example #29
def test_custom_backend_name_notsupported():
    class custom_backend(object):
        def __init__(self, **kwargs):
            self.name = "notsupported"

    backend = custom_backend()
    assert pyhf.tensorlib.name != backend.name
    pyhf.set_backend(backend)
    assert pyhf.tensorlib.name == backend.name
Example #30
def fit(
    model: pyhf.pdf.Model,
    data: List[float],
    init_pars: Optional[List[float]] = None,
    fixed_pars: Optional[List[bool]] = None,
    asimov: bool = False,
    minuit_verbose: bool = False,
) -> FitResults:
    """Performs a  maximum likelihood fit, reports and returns the results.
    The ``asimov`` flag allows fitting the Asimov dataset instead of the observed
    data.

    Args:
        model (pyhf.pdf.Model): Model to be used in the fit.
        data (List[float]): Data to fit the model to.
        init_pars (Optional[List[float]], optional): Initial parameter settings;
            if None, the ``pyhf`` suggested values are used. Defaults to None.
        fixed_pars (Optional[List[bool]], optional): List of booleans specifying
            which parameters are held fixed in the fit. Defaults to None.
        asimov (bool, optional): Whether to fit the Asimov dataset instead of the
            observed data. Defaults to False.
        minuit_verbose (bool, optional): Sets the MINUIT verbosity. Defaults to False.

    Returns:
        FitResults: Object containing fit results.
    """

    log.info("performing maximum likelihood fit")

    pyhf.set_backend("numpy",
                     pyhf.optimize.minuit_optimizer(verbose=minuit_verbose))

    result, result_obj = pyhf.infer.mle.fit(
        data,
        model,
        init_pars=init_pars,
        fixed_params=fixed_pars,
        return_uncertainties=True,
        return_result_obj=True,
    )

    bestfit = result[:, 0]
    uncertainty = result[:, 1]
    labels = model_tools.get_parameter_names(model)
    types = model_tools.get_parameter_types(model)
    corr_mat = result_obj.corr
    cov_mat = result_obj.hess_inv
    best_twice_nll = float(result_obj.fun)

    fit_result = FitResults(bestfit, uncertainty, labels, types, cov_mat,
                            corr_mat, best_twice_nll)

    log.debug(print_results(fit_result))  # type: ignore
    log.debug(
        f"-2 log(L) = {fit_result.best_twice_nll:.6f} at the best-fit point")

    return fit_result
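A usage sketch for ``fit``, assuming ``FitResults``, ``model_tools``, and ``print_results`` are provided by the surrounding package; only ``best_twice_nll`` is printed because that attribute name appears explicitly in the function body:

# Usage sketch (assumed package layout): fit a toy model with MINUIT output.
import pyhf

model = pyhf.simplemodels.uncorrelated_background(
    signal=[10.0], bkg=[60.0], bkg_uncertainty=[8.0]
)
data = [65.0] + model.config.auxdata

fit_result = fit(model, data, minuit_verbose=True)
print(f"-2 log(L) = {fit_result.best_twice_nll:.6f}")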