def test_lumi(backend):
    """Exercise the combined lumi modifier, unbatched and with batch_size=4."""
    config = MockConfig(
        par_map={
            'lumi': {
                'paramset': constrained_by_normal(
                    n_parameters=1,
                    inits=[0],
                    bounds=[[-5, 5]],
                    fixed=False,
                    auxdata=[None],
                    sigmas=[None],
                ),
                'slice': slice(0, 1),
            }
        },
        par_order=['lumi'],
        samples=['signal', 'background'],
    )
    modifier_data = {
        'lumi/lumi': {
            'signal': {
                'type': 'lumi',
                'name': 'lumi',
                'data': {'mask': [True, True, True]},
            },
            'background': {
                'type': 'lumi',
                'name': 'lumi',
                'data': {'mask': [True, True, True]},
            },
        },
    }

    # unbatched: a single lumi value scales every bin of every sample
    combined = lumi_combined([('lumi', 'lumi')], config, modifier_data)
    applied = combined.apply(pyhf.tensorlib.astensor([0.5]))
    assert pyhf.tensorlib.shape(applied) == (1, 2, 1, 3)
    applied = np.asarray(pyhf.tensorlib.tolist(applied))
    for sample_index in (0, 1):
        assert np.allclose(applied[0, sample_index, 0], [0.5, 0.5, 0.5])

    # batched: each row of the batch carries its own lumi value
    combined = lumi_combined([('lumi', 'lumi')], config, modifier_data, batch_size=4)
    applied = combined.apply(pyhf.tensorlib.astensor([[1.0], [2.0], [3.0], [4.0]]))
    assert pyhf.tensorlib.shape(applied) == (1, 2, 4, 3)
    applied = np.asarray(pyhf.tensorlib.tolist(applied))
    for batch_index, expected in enumerate([1.0, 2.0, 3.0, 4.0]):
        assert np.allclose(applied[0, 0, batch_index], [expected] * 3)
def test_histosys(backend):
    """Exercise the combined histosys modifier, unbatched and with batch_size=4."""
    config = MockConfig(
        par_map={
            'hello': {
                'paramset': constrained_by_normal(
                    name='hello',
                    is_scalar=True,
                    n_parameters=1,
                    inits=[0],
                    bounds=[[-5, 5]],
                    fixed=False,
                    auxdata=[0.0],
                ),
                'slice': slice(0, 1),
            },
            'world': {
                'paramset': constrained_by_normal(
                    name='world',
                    is_scalar=True,
                    n_parameters=1,
                    inits=[0],
                    bounds=[[-5, 5]],
                    fixed=False,
                    auxdata=[0.0],
                ),
                'slice': slice(1, 2),
            },
        },
        par_order=['hello', 'world'],
        samples=['signal', 'background'],
    )
    # both samples carry identical histosys data for each systematic
    modifier_data = {
        'histosys/hello': {
            sample: {
                'type': 'histosys',
                'name': 'hello',
                'data': {
                    'hi_data': [11, 12, 13],
                    'lo_data': [9, 8, 7],
                    'nom_data': [10, 10, 10],
                    'mask': [True, True, True],
                },
            }
            for sample in ('signal', 'background')
        },
        'histosys/world': {
            sample: {
                'type': 'histosys',
                'name': 'world',
                'data': {
                    'hi_data': [10, 10, 10],
                    'lo_data': [5, 6, 7],
                    'nom_data': [10, 10, 10],
                    'mask': [True, True, True],
                },
            }
            for sample in ('signal', 'background')
        },
    }

    combined = histosys_combined(
        [('hello', 'histosys'), ('world', 'histosys')], config, modifier_data
    )
    delta = combined.apply(pyhf.tensorlib.astensor([0.5, -1.0]))
    assert pyhf.tensorlib.shape(delta) == (2, 2, 1, 3)
    delta = np.asarray(pyhf.tensorlib.tolist(delta))
    assert np.allclose(delta[0, 0, 0], [0.5, 1.0, 1.5])

    combined = histosys_combined(
        [('hello', 'histosys'), ('world', 'histosys')],
        config,
        modifier_data,
        batch_size=4,
    )
    delta = combined.apply(
        pyhf.tensorlib.astensor([[-1.0, -1.0], [1.0, 1.0], [-1.0, 1.0], [1.0, 1.0]])
    )
    assert pyhf.tensorlib.shape(delta) == (2, 2, 4, 3)
    delta = np.asarray(pyhf.tensorlib.tolist(delta))
    # per-batch expectation for the 'hello' systematic on the signal sample
    expected_by_batch = [
        [-1.0, -2.0, -3.0],
        [1.0, 2.0, 3.0],
        [-1.0, -2.0, -3.0],
        [1.0, 2.0, 3.0],
    ]
    for batch_index, expected in enumerate(expected_by_batch):
        assert np.allclose(delta[0, 0, batch_index], expected)
def test_stat(backend):
    """Exercise the combined staterror modifier across two channels."""
    config = MockConfig(
        par_map={
            'staterror_chan1': {
                'paramset': constrained_by_normal(
                    name='staterror_chan1',
                    is_scalar=False,
                    n_parameters=1,
                    inits=[1],
                    bounds=[[0, 10]],
                    fixed=False,
                    auxdata=[1],
                ),
                'slice': slice(0, 1),
            },
            'staterror_chan2': {
                'paramset': constrained_by_normal(
                    name='staterror_chan2',
                    is_scalar=False,
                    n_parameters=2,
                    inits=[1, 1],
                    bounds=[[0, 10], [0, 10]],
                    fixed=False,
                    auxdata=[1, 1],
                ),
                'slice': slice(1, 3),
            },
        },
        channels=['chan1', 'chan2'],
        channel_nbins={'chan1': 1, 'chan2': 2},
        par_order=['staterror_chan1', 'staterror_chan2'],
        samples=['signal', 'background'],
    )
    # chan1's modifier masks only the first bin; chan2's masks the last two
    modifier_data = {
        'staterror/staterror_chan1': {
            sample: {
                'type': 'staterror',
                'name': 'staterror_chan1',
                'data': {
                    'mask': [True, False, False],
                    'nom_data': [10, 10, 10],
                    'uncrt': [1, 0, 0],
                },
            }
            for sample in ('signal', 'background')
        },
        'staterror/staterror_chan2': {
            sample: {
                'type': 'staterror',
                'name': 'staterror_chan2',
                'data': {
                    'mask': [False, True, True],
                    'nom_data': [10, 10, 10],
                    'uncrt': [0, 1, 1],
                },
            }
            for sample in ('signal', 'background')
        },
    }

    combined = staterror_combined(
        [('staterror_chan1', 'staterror'), ('staterror_chan2', 'staterror')],
        config,
        modifier_data,
    )
    factors = combined.apply(pyhf.tensorlib.astensor([1.1, 1.2, 1.3]))
    assert pyhf.tensorlib.shape(factors) == (2, 2, 1, 3)
    factors = np.asarray(pyhf.tensorlib.tolist(factors))
    # masked-off bins stay at the identity factor 1.0
    assert np.allclose(factors[0, 0, 0], [1.1, 1.0, 1.0])
    assert np.allclose(factors[1, 0, 0], [1, 1.2, 1.3])
def test_batched_constraints(backend):
    """Check Poisson/Gaussian combined constraint logpdf values, scalar and batched.

    The parameter vector is laid out per ``par_order`` as
    [pois1 (1 par), pois2 (2 pars), norm1 (2 pars), norm2 (3 pars)] == 8 entries.
    """
    config = MockConfig(
        par_order=['pois1', 'pois2', 'norm1', 'norm2'],
        par_map={
            'pois1': {
                'paramset': constrained_by_poisson(
                    name='pois1',
                    is_scalar=False,
                    n_parameters=1,
                    inits=[1.0],
                    bounds=[[0, 10]],
                    auxdata=[12],
                    factors=[12],
                    fixed=False,
                ),
                'slice': slice(0, 1),
                # NOTE(review): only 'pois1' carries a par_map-level 'auxdata'
                # entry; the paramset already has auxdata=[12] — confirm this
                # extra key is intentional and not a leftover.
                'auxdata': [1],
            },
            'pois2': {
                'paramset': constrained_by_poisson(
                    name='pois2',
                    is_scalar=False,
                    n_parameters=2,
                    inits=[1.0] * 2,
                    bounds=[[0, 10]] * 2,
                    auxdata=[13, 14],
                    factors=[13, 14],
                    fixed=False,
                ),
                'slice': slice(1, 3),
            },
            'norm1': {
                'paramset': constrained_by_normal(
                    name='norm1',
                    is_scalar=False,
                    n_parameters=2,
                    inits=[0] * 2,
                    bounds=[[0, 10]] * 2,
                    auxdata=[0, 0],
                    sigmas=[1.5, 2.0],
                    fixed=False,
                ),
                'slice': slice(3, 5),
            },
            'norm2': {
                'paramset': constrained_by_normal(
                    name='norm2',
                    is_scalar=False,
                    n_parameters=3,
                    inits=[0] * 3,
                    bounds=[[0, 10]] * 3,
                    auxdata=[0, 0, 0],
                    # no sigmas given: widths default (presumably to 1.0 —
                    # the expected values below use sigma=1.0 for these pars)
                    fixed=False,
                ),
                'slice': slice(5, 8),
            },
        },
    )
    # 3 Poisson-constrained pars (2 paramsets) + 5 normal-constrained pars
    suggested_pars = [1.0] * 3 + [0.0] * 5

    # scalar (unbatched) Poisson constraint at nominal rates
    constraint = poisson_constraint_combined(config)
    result = default_backend.astensor(
        pyhf.tensorlib.tolist(
            constraint.logpdf(
                pyhf.tensorlib.astensor(config.auxdata),
                pyhf.tensorlib.astensor(suggested_pars),
            )
        )
    )
    assert np.isclose(
        result,
        sum(
            [
                default_backend.poisson_logpdf(data, rate)
                for data, rate in zip([12, 13, 14], [12, 13, 14])
            ]
        ),
    )
    assert result.shape == ()

    # Poisson pars scaled by 1.1 => rates are factor * 1.1
    suggested_pars = [1.1] * 3 + [0.0] * 5
    constraint = poisson_constraint_combined(config)
    result = default_backend.astensor(
        pyhf.tensorlib.tolist(
            constraint.logpdf(
                pyhf.tensorlib.astensor(config.auxdata),
                pyhf.tensorlib.astensor(suggested_pars),
            )
        )
    )
    assert np.isclose(
        result,
        sum(
            [
                default_backend.poisson_logpdf(data, rate)
                for data, rate in zip([12, 13, 14], [12 * 1.1, 13 * 1.1, 14 * 1.1])
            ]
        ),
    )
    assert result.shape == ()

    # batched Poisson constraint: identical rows, only the shape is checked
    constraint = poisson_constraint_combined(config, batch_size=10)
    result = constraint.logpdf(
        pyhf.tensorlib.astensor(config.auxdata),
        pyhf.tensorlib.astensor([suggested_pars] * 10),
    )
    assert result.shape == (10,)

    # batched Poisson constraint with distinct rows; expected value is the
    # per-row sum of the individual Poisson logpdfs (axis=1)
    suggested_pars = [
        [1.1, 1.2, 1.3] + [0.0] * 5,  # 3 pois pars, 5 norm pars
        [0.7, 0.8, 0.9] + [0.0] * 5,  # 3 pois pars, 5 norm pars
        [0.4, 0.5, 0.6] + [0.0] * 5,  # 3 pois pars, 5 norm pars
    ]
    constraint = poisson_constraint_combined(config, batch_size=3)
    result = default_backend.astensor(
        pyhf.tensorlib.tolist(
            constraint.logpdf(
                pyhf.tensorlib.astensor(config.auxdata),
                pyhf.tensorlib.astensor(suggested_pars),
            )
        )
    )
    assert np.all(
        np.isclose(
            result,
            np.sum(
                [
                    [
                        default_backend.poisson_logpdf(data, rate)
                        for data, rate in zip(
                            [12, 13, 14], [12 * 1.1, 13 * 1.2, 14 * 1.3]
                        )
                    ],
                    [
                        default_backend.poisson_logpdf(data, rate)
                        for data, rate in zip(
                            [12, 13, 14], [12 * 0.7, 13 * 0.8, 14 * 0.9]
                        )
                    ],
                    [
                        default_backend.poisson_logpdf(data, rate)
                        for data, rate in zip(
                            [12, 13, 14], [12 * 0.4, 13 * 0.5, 14 * 0.6]
                        )
                    ],
                ],
                axis=1,
            ),
        )
    )
    assert result.shape == (3,)

    # Gaussian constraint, batch of one, all normal pars at their auxdata
    suggested_pars = [1.0] * 3 + [0.0] * 5  # 3 pois pars, 5 norm pars
    constraint = gaussian_constraint_combined(config, batch_size=1)
    result = default_backend.astensor(
        pyhf.tensorlib.tolist(
            constraint.logpdf(
                pyhf.tensorlib.astensor(config.auxdata),
                pyhf.tensorlib.astensor(suggested_pars),
            )
        )
    )
    # sigmas: [1.5, 2.0] from norm1, then the defaulted 1.0s for norm2
    assert np.isclose(
        result[0],
        sum(
            [
                default_backend.normal_logpdf(data, mu, sigma)
                for data, mu, sigma in zip(
                    [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [1.5, 2.0, 1.0, 1.0, 1.0]
                )
            ]
        ),
    )
    assert result.shape == (1,)

    # Gaussian constraint with shifted means
    suggested_pars = [1.0] * 3 + [1, 2, 3, 4, 5]  # 3 pois pars, 5 norm pars
    constraint = gaussian_constraint_combined(config, batch_size=1)
    result = default_backend.astensor(
        pyhf.tensorlib.tolist(
            constraint.logpdf(
                pyhf.tensorlib.astensor(config.auxdata),
                pyhf.tensorlib.astensor(suggested_pars),
            )
        )
    )
    assert np.isclose(
        result[0],
        sum(
            [
                default_backend.normal_logpdf(data, mu, sigma)
                for data, mu, sigma in zip(
                    [0, 0, 0, 0, 0], [1, 2, 3, 4, 5], [1.5, 2.0, 1.0, 1.0, 1.0]
                )
            ]
        ),
    )
    assert result.shape == (1,)

    # batched Gaussian constraint with three distinct rows
    suggested_pars = [
        [1.0] * 3 + [1, 2, 3, 4, 5],  # 3 pois pars, 5 norm pars
        [1.0] * 3 + [-1, -2, -3, -4, -5],  # 3 pois pars, 5 norm pars
        [1.0] * 3 + [-1, -2, 0, 1, 2],  # 3 pois pars, 5 norm pars
    ]
    constraint = gaussian_constraint_combined(config, batch_size=3)
    result = default_backend.astensor(
        pyhf.tensorlib.tolist(
            constraint.logpdf(
                pyhf.tensorlib.astensor(config.auxdata),
                pyhf.tensorlib.astensor(suggested_pars),
            )
        )
    )
    assert np.all(
        np.isclose(
            result,
            np.sum(
                [
                    [
                        default_backend.normal_logpdf(data, mu, sigma)
                        for data, mu, sigma in zip(
                            [0, 0, 0, 0, 0], [1, 2, 3, 4, 5], [1.5, 2.0, 1.0, 1.0, 1.0]
                        )
                    ],
                    [
                        default_backend.normal_logpdf(data, mu, sigma)
                        for data, mu, sigma in zip(
                            [0, 0, 0, 0, 0],
                            [-1, -2, -3, -4, -5],
                            [1.5, 2.0, 1.0, 1.0, 1.0],
                        )
                    ],
                    [
                        default_backend.normal_logpdf(data, mu, sigma)
                        for data, mu, sigma in zip(
                            [0, 0, 0, 0, 0],
                            [-1, -2, 0, 1, 2],
                            [1.5, 2.0, 1.0, 1.0, 1.0],
                        )
                    ],
                ],
                axis=1,
            ),
        )
    )
    assert result.shape == (3,)

    # NOTE(review): here `suggested_pars` is still the 3-row batch above, so
    # this tensor has an extra batch dimension (10, 3, 8) — confirm the shape
    # check below is the intended assertion and not relying on a flat row.
    constraint = gaussian_constraint_combined(config, batch_size=10)
    result = constraint.logpdf(
        pyhf.tensorlib.astensor(config.auxdata),
        pyhf.tensorlib.astensor([suggested_pars] * 10),
    )
    assert result.shape == (10,)
def test_normsys(backend):
    """Exercise the combined normsys modifier, unbatched and with batch_size=4.

    Uses two normal-constrained systematics ('hello', 'world') with per-sample
    hi/lo factors, and checks the interpolated multiplicative factors at
    alpha = +1/-1 for both a single parameter point and a batch of four.
    """
    mc = MockConfig(
        par_map={
            'hello': {
                'paramset': constrained_by_normal(
                    n_parameters=1,
                    inits=[0],
                    bounds=[[-5, 5]],
                    fixed=False,
                    auxdata=[0.0],
                ),
                'slice': slice(0, 1),
            },
            'world': {
                'paramset': constrained_by_normal(
                    n_parameters=1,
                    inits=[0],
                    bounds=[[-5, 5]],
                    fixed=False,
                    auxdata=[0.0],
                ),
                'slice': slice(1, 2),
            },
        },
        par_order=['hello', 'world'],
        samples=['signal', 'background'],
    )
    mega_mods = {
        'normsys/hello': {
            'signal': {
                'type': 'normsys',
                'name': 'hello',
                'data': {
                    'hi': [1.1] * 3,
                    'lo': [0.9] * 3,
                    'nom_data': [1, 1, 1],
                    'mask': [True, True, True],
                },
            },
            'background': {
                'type': 'normsys',
                'name': 'hello',
                'data': {
                    'hi': [1.2] * 3,
                    'lo': [0.8] * 3,
                    'nom_data': [1, 1, 1],
                    'mask': [True, True, True],
                },
            },
        },
        'normsys/world': {
            'signal': {
                # fixed: was 'type': 'v' — every other mega_mods entry in this
                # file uses the modifier's own type string
                'type': 'normsys',
                'name': 'world',
                'data': {
                    'hi': [1.3] * 3,
                    'lo': [0.7] * 3,
                    'nom_data': [1, 1, 1],
                    'mask': [True, True, True],
                },
            },
            'background': {
                'type': 'normsys',
                'name': 'world',
                'data': {
                    'hi': [1.4] * 3,
                    'lo': [0.6] * 3,
                    'nom_data': [1, 1, 1],
                    'mask': [True, True, True],
                },
            },
        },
    }

    # unbatched: hello at +1 picks up 'hi', world at -1 picks up 'lo'
    hsc = normsys_combined([('hello', 'normsys'), ('world', 'normsys')], mc, mega_mods)
    mod = hsc.apply(pyhf.tensorlib.astensor([1.0, -1.0]))
    shape = pyhf.tensorlib.shape(mod)
    assert shape == (2, 2, 1, 3)
    mod = np.asarray(pyhf.tensorlib.tolist(mod))
    assert np.allclose(mod[0, 0, 0], [1.1, 1.1, 1.1])
    assert np.allclose(mod[0, 1, 0], [1.2, 1.2, 1.2])
    assert np.allclose(mod[1, 0, 0], [0.7, 0.7, 0.7])
    assert np.allclose(mod[1, 1, 0], [0.6, 0.6, 0.6])

    # batched: alternating -1/+1 rows alternate lo/hi factors per batch entry
    hsc = normsys_combined(
        [('hello', 'normsys'), ('world', 'normsys')], mc, mega_mods, batch_size=4
    )
    mod = hsc.apply(
        pyhf.tensorlib.astensor([[-1.0, -1.0], [1.0, 1.0], [-1.0, -1.0], [1.0, 1.0]])
    )
    shape = pyhf.tensorlib.shape(mod)
    assert shape == (2, 2, 4, 3)
    mod = np.asarray(pyhf.tensorlib.tolist(mod))
    assert np.allclose(mod[0, 0, 0], [0.9, 0.9, 0.9])
    assert np.allclose(mod[0, 0, 1], [1.1, 1.1, 1.1])
    assert np.allclose(mod[0, 0, 2], [0.9, 0.9, 0.9])
    assert np.allclose(mod[0, 0, 3], [1.1, 1.1, 1.1])