Example No. 1
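These snippets are shown without their module-level imports and pytest fixtures. A plausible preamble, assuming the pre-1.0 pyhf layout these tests target (TensorFlow 1.x sessions and one backend class per tensor library; exact import paths may differ between pyhf versions), would be:

import json

import jsonschema
import numpy as np
import pytest
import tensorflow as tf

import pyhf
import pyhf.readxml
from pyhf.tensor.numpy_backend import numpy_backend
from pyhf.tensor.pytorch_backend import pytorch_backend
from pyhf.tensor.tensorflow_backend import tensorflow_backend
from pyhf.tensor.mxnet_backend import mxnet_backend

Function arguments such as backend, source, spec, mu, and setup, as well as the validate_runOnePoint helper used later, are supplied by pytest fixtures and helpers defined elsewhere in the test module. This first example evaluates the same model log-pdf on every backend and requires the results to agree to within 1e-6.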
def test_pdf_eval():
    tf_sess = tf.Session()
    backends = [
        numpy_backend(poisson_from_normal=True),
        pytorch_backend(),
        tensorflow_backend(session=tf_sess),
        mxnet_backend()
    ]

    values = []
    for b in backends:
        pyhf.set_backend(b)

        source = {
            "binning": [2, -0.5, 1.5],
            "bindata": {
                "data": [120.0, 180.0],
                "bkg": [100.0, 150.0],
                "bkgsys_up": [102, 190],
                "bkgsys_dn": [98, 100],
                "sig": [30.0, 95.0]
            }
        }
        spec = {
            'channels': [{
                'name': 'singlechannel',
                'samples': [{
                    'name': 'signal',
                    'data': source['bindata']['sig'],
                    'modifiers': [{
                        'name': 'mu',
                        'type': 'normfactor',
                        'data': None
                    }]
                }, {
                    'name': 'background',
                    'data': source['bindata']['bkg'],
                    'modifiers': [{
                        'name': 'bkg_norm',
                        'type': 'histosys',
                        'data': {
                            'lo_data': source['bindata']['bkgsys_dn'],
                            'hi_data': source['bindata']['bkgsys_up']
                        }
                    }]
                }]
            }]
        }
        pdf = pyhf.hfpdf(spec)
        data = source['bindata']['data'] + pdf.config.auxdata

        v1 = pdf.logpdf(pdf.config.suggested_init(), data)
        values.append(pyhf.tensorlib.tolist(v1)[0])

    assert np.std(values) < 1e-6
Example No. 2
def test_import_prepHistFactory():
    schema = json.load(open('validation/spec.json'))
    parsed_xml = pyhf.readxml.parse('validation/xmlimport_input/config/example.xml',
                                    'validation/xmlimport_input/')

    # build the spec, strictly checks properties included
    spec = {'channels': parsed_xml['channels']}
    jsonschema.validate(spec, schema)
    pdf = pyhf.hfpdf(spec, poiname='SigXsecOverSM')

    data = [binvalue for k in pdf.spec['channels'] for binvalue
            in parsed_xml['data'][k['name']]] + pdf.config.auxdata

    channels = {channel['name'] for channel in pdf.spec['channels']}
    samples = {channel['name']: [sample['name'] for sample in channel['samples']] for channel in pdf.spec['channels']}

    assert data == [122.0, 112.0, 0, 0, 1.0, 1.0, 0.0]
    ### auxiliary data appended after the two observed bins:
    ### signal overallsys
    ### bkg1 overallsys (stat ignored)
    ### bkg2 staterror (2 bins)
    ### bkg2 overallsys

    assert 'channel1' in channels
    assert 'signal' in samples['channel1']
    assert 'background1' in samples['channel1']
    assert 'background2' in samples['channel1']

    assert pdf.expected_actualdata(
        pdf.config.suggested_init()).tolist() == [120.0, 110.0]

    pars = pdf.config.suggested_init()
    pars[pdf.config.par_slice('SigXsecOverSM')] = [2.0]
    assert pdf.expected_data(
        pars, include_auxdata=False).tolist() == [140, 120]
Example No. 3
def test_pdf_integration_shapesys():
    schema = json.load(open('validation/spec.json'))
    source = json.load(open('validation/data/2bin_histosys_example2.json'))
    spec = {
        'channels': [{
            'name': 'singlechannel',
            'samples': [{
                'name': 'signal',
                'data': source['bindata']['sig'],
                'modifiers': [{
                    'name': 'mu',
                    'type': 'normfactor',
                    'data': None
                }]
            }, {
                'name': 'background',
                'data': source['bindata']['bkg'],
                'modifiers': [{
                    'name': 'bkg_norm',
                    'type': 'shapesys',
                    'data': [10, 10]
                }]
            }]
        }]
    }
    jsonschema.validate(spec, schema)
    pdf = pyhf.hfpdf(spec)
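    # shapesys adds one multiplicative nuisance parameter per background bin, so the
    # assertions below scale each bin of the background independently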

    pars = [None, None]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.0, 1.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100, 150]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.1, 1.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100 * 1.1, 150]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.0, 1.1]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100, 150 * 1.1]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.1, 0.9]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100 * 1.1, 150 * 0.9]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [0.9, 1.1]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100 * 0.9, 150 * 1.1]
Example No. 4
def test_pdf_integration_normsys(backend):
    pyhf.set_backend(backend)
    if isinstance(pyhf.tensorlib, pyhf.tensor.tensorflow_backend):
        tf.reset_default_graph()
        pyhf.tensorlib.session = tf.Session()
    schema = json.load(open('validation/spec.json'))
    source = json.load(open('validation/data/2bin_histosys_example2.json'))
    spec = {
        'channels': [{
            'name': 'singlechannel',
            'samples': [{
                'name': 'signal',
                'data': source['bindata']['sig'],
                'modifiers': [{
                    'name': 'mu',
                    'type': 'normfactor',
                    'data': None
                }]
            }, {
                'name': 'background',
                'data': source['bindata']['bkg'],
                'modifiers': [{
                    'name': 'bkg_norm',
                    'type': 'normsys',
                    'data': {
                        'lo': 0.9,
                        'hi': 1.1
                    }
                }]
            }]
        }]
    }
    jsonschema.validate(spec, schema)
    pdf = pyhf.hfpdf(spec)
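    # normsys is a single-parameter overall normalisation: alpha = +1 applies the 'hi'
    # factor (1.1), alpha = -1 the 'lo' factor (0.9), and alpha = 0 keeps the nominal yields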

    pars = [None, None]
    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [0.0]
    assert np.allclose(
        pyhf.tensorlib.tolist(pdf.expected_data(pars, include_auxdata=False)),
        [100, 150])

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.0]
    assert np.allclose(
        pyhf.tensorlib.tolist(pdf.expected_data(pars, include_auxdata=False)),
        [100 * 1.1, 150 * 1.1])

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [-1.0]
    assert np.allclose(
        pyhf.tensorlib.tolist(pdf.expected_data(pars, include_auxdata=False)),
        [100 * 0.9, 150 * 0.9])
Example No. 5
def test_add_unknown_modifier():
    spec = {
        'channels': [{
            'name': 'channe',
            'samples': [
                {
                    'modifiers': [{
                        'name': 'a_name',
                        'type': 'this_should_not_exist',
                        'data': None
                    }]
                },
            ]
        }]
    }
    with pytest.raises(pyhf.exceptions.InvalidModifier):
        pyhf.hfpdf(spec)
Example No. 6
def test_pdf_integration_staterror():
    spec = {
        'channels': [
            {
                'name': 'firstchannel',
                'samples': [{
                    'name': 'mu',
                    'data': [10., 10.],
                    'modifiers': [{
                        'name': 'mu',
                        'type': 'normfactor',
                        'data': None
                    }]
                }, {
                    'name': 'bkg1',
                    'data': [50.0, 70.0],
                    'modifiers': [{
                        'name': 'stat_firstchannel',
                        'type': 'staterror',
                        'data': [12., 12.]
                    }]
                }, {
                    'name': 'bkg2',
                    'data': [30.0, 20.],
                    'modifiers': [{
                        'name': 'stat_firstchannel',
                        'type': 'staterror',
                        'data': [5., 5.]
                    }]
                }, {
                    'name': 'bkg3',
                    'data': [20.0, 15.0],
                    'modifiers': []
                }]
            },
        ]
    }
    pdf = pyhf.hfpdf(spec)
    par = pdf.config.par_slice('stat_firstchannel')
    mod = pdf.config.modifier('stat_firstchannel')
    assert mod.uncertainties == [[12., 12.], [5., 5.]]
    assert mod.nominal_counts == [[50., 70.], [30., 20.]]

    computed = pyhf.tensorlib.tolist(mod.pdf([1.0, 1.0], [1.0, 1.0]))
    expected = pyhf.tensorlib.tolist(
        pyhf.tensorlib.normal([1.0, 1.0],
                              mu=[1.0, 1.0],
                              sigma=[13. / 80., 13. / 90.]))
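    # expected constraint width per bin: sqrt(12**2 + 5**2) = 13 absolute uncertainty,
    # divided by the summed nominal background counts 50 + 30 = 80 and 70 + 20 = 90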
    for c, e in zip(computed, expected):
        assert c == e
Example No. 7
def test_optim(source, spec, mu, backend):
    pdf = pyhf.hfpdf(spec)
    data = source['bindata']['data'] + pdf.config.auxdata

    init_pars = pdf.config.suggested_init()
    par_bounds = pdf.config.suggested_bounds()

    pyhf.set_backend(backend)
    optim = pyhf.optimizer
    if isinstance(pyhf.tensorlib, pyhf.tensor.tensorflow_backend):
        tf.reset_default_graph()
        pyhf.tensorlib.session = tf.Session()
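    # unconstrained_bestfit runs the global maximum-likelihood fit; constrained_bestfit
    # fixes the parameter of interest to the given mu and fits the remaining parameters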

    result = optim.unconstrained_bestfit(
        pyhf.loglambdav, data, pdf, init_pars, par_bounds)
    assert pyhf.tensorlib.tolist(result)

    result = optim.constrained_bestfit(
        pyhf.loglambdav, mu, data, pdf, init_pars, par_bounds)
    assert pyhf.tensorlib.tolist(result)
Example No. 8
def test_validation(setup):
    source = setup['source']
    pdf = pyhf.hfpdf(setup['spec'])

    if 'channels' in source:
        data = []
        for c in pdf.spec['channels']:
            data += source['channels'][c['name']]['bindata']['data']
        data = data + pdf.config.auxdata
    else:
        data = source['bindata']['data'] + pdf.config.auxdata

    if 'auxdata' in setup['expected']['config']:
        assert len(pdf.config.auxdata) == \
            setup['expected']['config']['auxdata']
    assert len(pdf.config.suggested_init()) == \
        setup['expected']['config']['init_pars']
    assert len(pdf.config.suggested_bounds()) == \
        setup['expected']['config']['par_bounds']

    validate_runOnePoint(pdf, data, setup['mu'], setup['expected']['result'])
Example No. 9
def test_import_histosys():
    schema = json.load(open('validation/spec.json'))
    parsed_xml = pyhf.readxml.parse(
        'validation/xmlimport_input2/config/example.xml',
        'validation/xmlimport_input2')

    # build the spec, strictly checks properties included
    spec = {'channels': parsed_xml['channels']}
    jsonschema.validate(spec, schema)
    pdf = pyhf.hfpdf(spec, poiname='SigXsecOverSM')

    data = [
        binvalue for k in pdf.spec['channels']
        for binvalue in parsed_xml['data'][k['name']]
    ] + pdf.config.auxdata

    channels = {channel['name']: channel for channel in pdf.spec['channels']}
    samples = {
        channel['name']: [sample['name'] for sample in channel['samples']]
        for channel in pdf.spec['channels']
    }

    assert channels['channel2']['samples'][0]['modifiers'][0]['type'] == 'histosys'
Example No. 10
def test_pdf_integration_histosys():
    schema = json.load(open('validation/spec.json'))
    source = json.load(open('validation/data/2bin_histosys_example2.json'))
    spec = {
        'channels': [{
            'name': 'singlechannel',
            'samples': [{
                'name': 'signal',
                'data': source['bindata']['sig'],
                'modifiers': [{
                    'name': 'mu',
                    'type': 'normfactor',
                    'data': None
                }]
            }, {
                'name': 'background',
                'data': source['bindata']['bkg'],
                'modifiers': [{
                    'name': 'bkg_norm',
                    'type': 'histosys',
                    'data': {
                        'lo_data': source['bindata']['bkgsys_dn'],
                        'hi_data': source['bindata']['bkgsys_up']
                    }
                }]
            }]
        }]
    }
    jsonschema.validate(spec, schema)
    pdf = pyhf.hfpdf(spec)
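    # histosys interpolates the background piecewise-linearly between lo_data and hi_data:
    # alpha = +/-1 reproduces bkgsys_up/bkgsys_dn exactly, larger |alpha| extrapolates linearly,
    # as the assertions below demonstrate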

    pars = [None, None]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [102, 190]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [2.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [104, 230]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [-1.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [98, 100]

    pars[pdf.config.par_slice('mu')] = [0.0]
    pars[pdf.config.par_slice('bkg_norm')] = [-2.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [96, 50]

    pars[pdf.config.par_slice('mu')] = [1.0]
    pars[pdf.config.par_slice('bkg_norm')] = [1.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [102 + 30, 190 + 95]

    pars[pdf.config.par_slice('mu')] = [1.0]
    pars[pdf.config.par_slice('bkg_norm')] = [-1.0]
    assert pdf.expected_data(pars, include_auxdata=False).tolist() == [98 + 30, 100 + 95]