Example #1
    def __init__(self, shape, par_map, par_selection):

        default_backend = pyhf.default_backend

        batch_size = shape[0] if len(shape) > 1 else None

        fullsize = default_backend.product(default_backend.astensor(shape))
        flat_indices = default_backend.astensor(range(int(fullsize)),
                                                dtype='int')
        self._all_indices = default_backend.reshape(flat_indices, shape)

        # a tensor viewer that can split and stitch parameters
        self.allpar_viewer = _tensorviewer_from_parmap(par_map, batch_size)

        # a tensor viewer that can split and stitch the selected parameters
        self.selected_viewer = _tensorviewer_from_sizes(
            [
                par_map[s]['slice'].stop - par_map[s]['slice'].start
                for s in par_selection
            ],
            par_selection,
            batch_size,
        )

        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)
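
The two helpers above build tensor viewers that split a flat parameter tensor into named blocks and stitch a selection of those blocks back together. A minimal standalone numpy sketch of that split/stitch idea (the parameter names and values here are purely illustrative, not pyhf internals):

# conceptual split-and-stitch sketch with plain numpy (illustrative only)
import numpy as np

par_map = {'mu': slice(0, 1), 'syst': slice(1, 4)}  # hypothetical parameter layout
flat_pars = np.asarray([1.0, 0.1, 0.2, 0.3])

# split: view each named block of the flat parameter vector
blocks = {name: flat_pars[sl] for name, sl in par_map.items()}

# stitch a selection back together by concatenating the chosen blocks
selection = ['syst']
stitched = np.concatenate([blocks[name] for name in selection])
print(stitched)  # [0.1 0.2 0.3]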
Example #2
    def __init__(self,
                 modifiers,
                 pdfconfig,
                 builder_data,
                 interpcode='code0',
                 batch_size=None):
        self.batch_size = batch_size
        self.interpcode = interpcode
        assert self.interpcode in ['code0', 'code2', 'code4p']

        keys = [f'{mtype}/{m}' for m, mtype in modifiers]
        histosys_mods = [m for m, _ in modifiers]

        parfield_shape = (
            (self.batch_size, pdfconfig.npars)
            if self.batch_size
            else (pdfconfig.npars,)
        )
        self.param_viewer = ParamViewer(parfield_shape, pdfconfig.par_map,
                                        histosys_mods)

        self._histosys_histoset = [[[
            builder_data[m][s]['data']['lo_data'],
            builder_data[m][s]['data']['nom_data'],
            builder_data[m][s]['data']['hi_data'],
        ] for s in pdfconfig.samples] for m in keys]
        self._histosys_mask = [[[builder_data[m][s]['data']['mask']]
                                for s in pdfconfig.samples] for m in keys]

        if histosys_mods:
            self.interpolator = getattr(interpolators, self.interpcode)(
                self._histosys_histoset)

        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)
Example #3
    def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
        default_backend = pyhf.default_backend
        self.batch_size = batch_size

        keys = [f'{mtype}/{m}' for m, mtype in modifiers]
        self._shapesys_mods = [m for m, _ in modifiers]

        parfield_shape = (self.batch_size or 1, pdfconfig.npars)
        self.param_viewer = ParamViewer(parfield_shape, pdfconfig.par_map,
                                        self._shapesys_mods)

        self._shapesys_mask = [[[builder_data[m][s]['data']['mask']]
                                for s in pdfconfig.samples] for m in keys]
        self.__shapesys_info = default_backend.astensor([[[
            builder_data[m][s]['data']['mask'],
            builder_data[m][s]['data']['nom_data'],
            builder_data[m][s]['data']['uncrt'],
        ] for s in pdfconfig.samples] for m in keys])
        global_concatenated_bin_indices = [[[
            j for c in pdfconfig.channels
            for j in range(pdfconfig.channel_nbins[c])
        ]]]

        self._access_field = default_backend.tile(
            global_concatenated_bin_indices,
            (len(self._shapesys_mods), self.batch_size or 1, 1),
        )
        # access field is shape (sys, batch, globalbin)

        # reindex it based on current masking
        self._reindex_access_field(pdfconfig)

        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)
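
As noted in the comment, the tiled access field has shape (sys, batch, globalbin). A quick numpy check of that tiling with illustrative numbers:

import numpy as np

# e.g. two channels with 2 and 3 bins -> global bin indices [0, 1, 0, 1, 2]
global_concatenated_bin_indices = [[[0, 1, 0, 1, 2]]]
n_sys, batch_size = 4, 1

access_field = np.tile(global_concatenated_bin_indices, (n_sys, batch_size, 1))
print(access_field.shape)  # (4, 1, 5) -> (sys, batch, globalbin)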
Example #4
    def __init__(self, pdfconfig, batch_size=None):
        default_backend = pyhf.default_backend

        self.batch_size = batch_size
        # iterate over all constraints; order doesn't matter

        self.par_indices = list(range(pdfconfig.npars))
        self.data_indices = list(range(len(pdfconfig.auxdata)))
        self.parsets = [
            pdfconfig.param_set(cname) for cname in pdfconfig.auxdata_order
        ]

        pars_constrained_by_poisson = [
            constrained_parameter
            for constrained_parameter in pdfconfig.auxdata_order
            if pdfconfig.param_set(constrained_parameter).pdf_type == 'poisson'
        ]

        parfield_shape = (self.batch_size or 1, pdfconfig.npars)
        self.param_viewer = ParamViewer(parfield_shape, pdfconfig.par_map,
                                        pars_constrained_by_poisson)

        start_index = 0
        poisson_constraint_data = []
        poisson_constraint_rate_factors = []
        for parset in self.parsets:
            end_index = start_index + parset.n_parameters
            thisauxdata = self.data_indices[start_index:end_index]
            start_index = end_index
            if not parset.pdf_type == 'poisson':
                continue

            poisson_constraint_data.append(thisauxdata)
            poisson_constraint_rate_factors.append(parset.factors)

        self._poisson_data = None
        self._access_field = None
        self._batched_factors = None
        if self.param_viewer.index_selection:
            self._poisson_data = default_backend.astensor(
                default_backend.concatenate(poisson_constraint_data),
                dtype='int')

            _poisson_rate_fac = default_backend.astensor(
                default_backend.concatenate(poisson_constraint_rate_factors),
                dtype='float',
            )
            factors = default_backend.reshape(_poisson_rate_fac, (1, -1))
            self._batched_factors = default_backend.tile(
                factors, (self.batch_size or 1, 1))

            access_field = default_backend.concatenate(
                self.param_viewer.index_selection, axis=1)
            self._access_field = access_field

        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)
Example #5
def test_subscribe_event():
    ename = 'test'

    m = mock.Mock()
    events.subscribe(ename)(m.__call__)

    assert ename in events.__events
    assert m.__call__ == events.__events.get(ename)[0]()
    del events.__events[ename]
Example #6
def test_event():
    ename = 'test'

    m = mock.Mock()
    events.subscribe(ename)(m)

    events.trigger(ename)()
    m.assert_called_once()
    del events.__events[ename]
Example #7
def test_subscribe_event():
    ename = 'test'

    m = mock.Mock()
    events.subscribe(ename)(m)

    assert ename in events.__events
    assert m in events.__events.get(ename)
    del events.__events[ename]
Example #8
def test_event_weakref():
    ename = 'test'

    m = mock.Mock()
    events.subscribe(ename)(m.__call__)
    assert len(events.trigger(ename)) == 1
    # should be weakly referenced
    del m
    assert len(events.trigger(ename)) == 0
    del events.__events[ename]
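
The weak referencing checked here is standard Python behaviour: a weak reference to a bound method dies once its instance is garbage collected. A standalone illustration using weakref.WeakMethod (how pyhf stores its callbacks internally is its own detail):

import weakref


class Counter:
    def bump(self):
        return 1


c = Counter()
ref = weakref.WeakMethod(c.bump)
print(ref() is not None)  # True: the instance is still alive

del c
print(ref() is None)  # True: the bound method went away with its instance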
Example #9
def test_subscribe_event():
    ename = 'test'

    m = mock.Mock()
    events.subscribe(ename)(m.__call__)
    assert ename in events.__events
    assert m.__call__.__func__ == events.__events.get(ename)[0][0]()
    assert "weakref" in repr(events.trigger(ename))
    assert list(events.trigger(ename))
    assert len(list(events.trigger(ename))) == 1
    del events.__events[ename]
Example #10
def test_subscribe_function(capsys):
    ename = 'test'

    def add(a, b):
        print(a + b)

    events.subscribe(ename)(add)
    events.trigger(ename)(1, 2)

    captured = capsys.readouterr()
    assert captured.out == "3\n"

    del events.__events[ename]
Example #11
    def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
        self.batch_size = batch_size

        keys = [f'{mtype}/{m}' for m, mtype in modifiers]
        lumi_mods = [m for m, _ in modifiers]

        parfield_shape = (
            (self.batch_size, pdfconfig.npars)
            if self.batch_size
            else (pdfconfig.npars,)
        )
        self.param_viewer = ParamViewer(parfield_shape, pdfconfig.par_map,
                                        lumi_mods)

        self._lumi_mask = [[[builder_data[m][s]['data']['mask']]
                            for s in pdfconfig.samples] for m in keys]
        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)
Example #12
    def __init__(self, histogramssets, subscribe=True):
        """Piecewise-linear Interpolation."""
        default_backend = pyhf.default_backend

        self._histogramssets = default_backend.astensor(histogramssets)
        # initial shape will be (nsysts, 1)
        self.alphasets_shape = (self._histogramssets.shape[0], 1)
        # precompute terms that only depend on the histogramssets
        self._deltas_up = (
            self._histogramssets[:, :, 2] - self._histogramssets[:, :, 1]
        )
        self._deltas_dn = (
            self._histogramssets[:, :, 1] - self._histogramssets[:, :, 0]
        )
        self._broadcast_helper = default_backend.ones(
            default_backend.shape(self._deltas_up))
        self._precompute()
        if subscribe:
            events.subscribe('tensorlib_changed')(self._precompute)
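
The precomputed _deltas_up = hi - nominal and _deltas_dn = nominal - lo are the slopes used by the piecewise-linear rule: the "up" slope applies for alpha >= 0 and the "down" slope otherwise. A one-bin numpy sketch with illustrative numbers:

import numpy as np

lo, nom, hi = 8.0, 10.0, 13.0   # illustrative single-bin [lo, nominal, hi]
delta_up = hi - nom             # matches _deltas_up
delta_dn = nom - lo             # matches _deltas_dn

def piecewise_linear_delta(alpha):
    # up slope for alpha >= 0, down slope for alpha < 0
    return np.where(alpha >= 0, alpha * delta_up, alpha * delta_dn)

print(nom + piecewise_linear_delta(np.asarray([-1.0, 0.0, 1.0])))  # [ 8. 10. 13.]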
Example #13
def test_disable_event():
    ename = 'test'

    m = mock.Mock()
    noop, noop_m = events.noop, mock.Mock()
    events.noop = noop_m
    events.subscribe(ename)(m)

    events.disable(ename)
    assert m.called is False
    assert ename in events.__disabled_events
    assert events.trigger(ename) == events.noop
    assert events.trigger(ename)() == events.noop()
    assert m.called is False
    noop_m.assert_called()
    events.enable(ename)
    assert ename not in events.__disabled_events
    del events.__events[ename]
    events.noop = noop
Example #14
    def __init__(self, histogramssets, subscribe=True):
        """Quadratic Interpolation."""
        default_backend = pyhf.default_backend

        self._histogramssets = default_backend.astensor(histogramssets)
        # initial shape will be (nsysts, 1)
        self.alphasets_shape = (self._histogramssets.shape[0], 1)
        # precompute terms that only depend on the histogramssets
        self._a = (
            0.5 *
            (self._histogramssets[:, :, 2] + self._histogramssets[:, :, 0]) -
            self._histogramssets[:, :, 1])
        self._b = 0.5 * (self._histogramssets[:, :, 2] -
                         self._histogramssets[:, :, 0])
        self._b_plus_2a = self._b + 2 * self._a
        self._b_minus_2a = self._b - 2 * self._a
        self._broadcast_helper = default_backend.ones(
            default_backend.shape(self._a))
        self._precompute()
        if subscribe:
            events.subscribe('tensorlib_changed')(self._precompute)
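
Here _a and _b are the coefficients of the quadratic rule delta(alpha) = a * alpha**2 + b * alpha for |alpha| <= 1, and _b_plus_2a / _b_minus_2a are the slopes at alpha = +1 / -1, which suggests linear extrapolation beyond that range. A one-bin numpy sketch of that rule (illustrative numbers; the exact extrapolation convention is defined by the interpolator itself):

import numpy as np

lo, nom, hi = 8.0, 10.0, 13.0     # illustrative single-bin [lo, nominal, hi]
a = 0.5 * (hi + lo) - nom         # matches self._a
b = 0.5 * (hi - lo)               # matches self._b

def quadratic_delta(alpha):
    inner = a * alpha**2 + b * alpha                  # |alpha| <= 1
    above = (a + b) + (b + 2 * a) * (alpha - 1)       # linear beyond +1
    below = (a - b) + (b - 2 * a) * (alpha + 1)       # linear beyond -1
    return np.where(alpha > 1, above, np.where(alpha < -1, below, inner))

print(nom + quadratic_delta(np.asarray([-1.0, 0.0, 1.0])))  # [ 8. 10. 13.]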
Example #15
    def __init__(self, config, modifiers, nominal_rates, batch_size=None):
        default_backend = pyhf.default_backend

        self.config = config

        self._factor_mods = []
        self._delta_mods = []
        self.batch_size = batch_size

        self._nominal_rates = default_backend.tile(
            nominal_rates, (1, 1, self.batch_size or 1, 1))

        self.modifiers_appliers = modifiers

        for modifier_applier in self.modifiers_appliers.values():
            if modifier_applier.op_code == "addition":
                self._delta_mods.append(modifier_applier.name)
            elif modifier_applier.op_code == "multiplication":
                self._factor_mods.append(modifier_applier.name)

        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)
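
The op_code split above sorts modifiers into additive "delta" modifiers and multiplicative "factor" modifiers. A simplified numpy sketch of how such contributions conventionally combine into expected rates (this ignores the per-modifier, per-sample, per-batch broadcasting that the real applier handles):

import numpy as np

nominal = np.asarray([10.0, 20.0])
deltas = [np.asarray([1.0, -2.0])]    # additive shifts, e.g. histosys-style
factors = [np.asarray([1.1, 1.0])]    # multiplicative scale factors, e.g. normsys/lumi-style

expected = (nominal + np.sum(deltas, axis=0)) * np.prod(factors, axis=0)
print(expected)  # [12.1 18. ]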
Example #16
def test_trigger_function(capsys):
    ename = 'test'

    def add(a, b):
        print(a + b)

    precall = mock.Mock()
    postcall = mock.Mock()

    wrapped_add = events.register(ename)(add)
    events.subscribe(f'{ename}::before')(precall.__call__)
    events.subscribe(f'{ename}::after')(postcall.__call__)

    precall.assert_not_called()
    postcall.assert_not_called()

    wrapped_add(1, 2)
    captured = capsys.readouterr()
    assert captured.out == "3\n"
    precall.assert_called_once()
    postcall.assert_called_once()

    del events.__events[f'{ename}::before']
    del events.__events[f'{ename}::after']
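
events.register(ename) wraps add so that a 'test::before' event fires ahead of the call and a 'test::after' event fires after it returns. A hedged, self-contained sketch of that decorator/observer pattern (illustrative only, not pyhf's actual events implementation, which also handles weak references and disabled events):

from collections import defaultdict
from functools import wraps

_subscribers = defaultdict(list)

def subscribe(name):
    def wrapper(func):
        _subscribers[name].append(func)
        return func
    return wrapper

def trigger(name):
    def fire(*args, **kwargs):
        for func in _subscribers[name]:
            func(*args, **kwargs)
    return fire

def register(name):
    def wrapper(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            trigger(f'{name}::before')()
            result = func(*args, **kwargs)
            trigger(f'{name}::after')()
            return result
        return wrapped
    return wrapper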
Example #17
    def __init__(self, indices, batch_size=None, names=None):
        # self.partition_indices has the "target" indices
        # of the stitched vector. In order to .gather() a
        # concatenation of source arrays into the desired
        # form, one needs to gather on the "sorted" indices
        # >>> source = np.asarray([9,8,7,6])
        # >>> target = np.asarray([2,1,3,0])
        # >>> source[target.argsort()]
        # array([6, 8, 9, 7])

        default_backend = pyhf.default_backend

        self.batch_size = batch_size
        self.names = names
        self._partition_indices = indices
        _concat_indices = default_backend.astensor(
            default_backend.concatenate(self._partition_indices), dtype='int'
        )
        self._sorted_indices = default_backend.tolist(
            _concat_indices.argsort())

        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)
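
The argsort trick from the comment above, written out as a runnable snippet with two partitions (same numbers as in the comment):

import numpy as np

sources = [np.asarray([9, 8]), np.asarray([7, 6])]            # per-partition source values
partition_indices = [np.asarray([2, 1]), np.asarray([3, 0])]  # target positions per partition

concat_source = np.concatenate(sources)              # [9 8 7 6]
concat_indices = np.concatenate(partition_indices)   # [2 1 3 0]
stitched = concat_source[np.argsort(concat_indices)]
print(stitched)  # [6 8 9 7]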
Example #18
    def __init__(self, histogramssets, subscribe=True, alpha0=1):
        """Polynomial Interpolation."""
        default_backend = pyhf.default_backend

        # alpha0 is assumed to be positive and non-zero. If alpha0 == 0, then
        # we cannot calculate the coefficients (e.g. determinant == 0)
        assert alpha0 > 0
        self.__alpha0 = alpha0

        self._histogramssets = default_backend.astensor(histogramssets)
        # initial shape will be (nsysts, 1)
        self.alphasets_shape = (self._histogramssets.shape[0], 1)
        # precompute terms that only depend on the histogramssets
        self._deltas_up = default_backend.divide(self._histogramssets[:, :, 2],
                                                 self._histogramssets[:, :, 1])
        self._deltas_dn = default_backend.divide(self._histogramssets[:, :, 0],
                                                 self._histogramssets[:, :, 1])
        self._broadcast_helper = default_backend.ones(
            default_backend.shape(self._deltas_up))
        self._alpha0 = self._broadcast_helper * self.__alpha0

        deltas_up_alpha0 = default_backend.power(self._deltas_up, self._alpha0)
        deltas_dn_alpha0 = default_backend.power(self._deltas_dn, self._alpha0)
        # x = A^{-1} b
        A_inverse = default_backend.astensor([
            [
                15.0 / (16 * alpha0),
                -15.0 / (16 * alpha0),
                -7.0 / 16.0,
                -7.0 / 16.0,
                1.0 / 16 * alpha0,
                -1.0 / 16.0 * alpha0,
            ],
            [
                3.0 / (2 * math.pow(alpha0, 2)),
                3.0 / (2 * math.pow(alpha0, 2)),
                -9.0 / (16 * alpha0),
                9.0 / (16 * alpha0),
                1.0 / 16,
                1.0 / 16,
            ],
            [
                -5.0 / (8 * math.pow(alpha0, 3)),
                5.0 / (8 * math.pow(alpha0, 3)),
                5.0 / (8 * math.pow(alpha0, 2)),
                5.0 / (8 * math.pow(alpha0, 2)),
                -1.0 / (8 * alpha0),
                1.0 / (8 * alpha0),
            ],
            [
                3.0 / (-2 * math.pow(alpha0, 4)),
                3.0 / (-2 * math.pow(alpha0, 4)),
                -7.0 / (-8 * math.pow(alpha0, 3)),
                7.0 / (-8 * math.pow(alpha0, 3)),
                -1.0 / (8 * math.pow(alpha0, 2)),
                -1.0 / (8 * math.pow(alpha0, 2)),
            ],
            [
                3.0 / (16 * math.pow(alpha0, 5)),
                -3.0 / (16 * math.pow(alpha0, 5)),
                -3.0 / (16 * math.pow(alpha0, 4)),
                -3.0 / (16 * math.pow(alpha0, 4)),
                1.0 / (16 * math.pow(alpha0, 3)),
                -1.0 / (16 * math.pow(alpha0, 3)),
            ],
            [
                1.0 / (2 * math.pow(alpha0, 6)),
                1.0 / (2 * math.pow(alpha0, 6)),
                -5.0 / (16 * math.pow(alpha0, 5)),
                5.0 / (16 * math.pow(alpha0, 5)),
                1.0 / (16 * math.pow(alpha0, 4)),
                1.0 / (16 * math.pow(alpha0, 4)),
            ],
        ])
        b = default_backend.stack([
            deltas_up_alpha0 - self._broadcast_helper,
            deltas_dn_alpha0 - self._broadcast_helper,
            default_backend.log(self._deltas_up) * deltas_up_alpha0,
            -default_backend.log(self._deltas_dn) * deltas_dn_alpha0,
            default_backend.power(default_backend.log(self._deltas_up), 2) *
            deltas_up_alpha0,
            default_backend.power(default_backend.log(self._deltas_dn), 2) *
            deltas_dn_alpha0,
        ])
        self._coefficients = default_backend.einsum('rc,shb,cshb->rshb',
                                                    A_inverse,
                                                    self._broadcast_helper, b)

        self._precompute()
        if subscribe:
            events.subscribe('tensorlib_changed')(self._precompute)
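
The six entries of b are the boundary conditions that fix the six polynomial coefficients via x = A^{-1} b: writing f_up(alpha) = (hi/nom)^alpha and f_dn(alpha) = (lo/nom)^(-alpha), they are the value (minus the nominal 1), the first derivative, and the second derivative of those exponential variations evaluated at alpha = +alpha0 and alpha = -alpha0, so the interior polynomial joins the exponential extrapolation smoothly at both matching points. Schematically,

    b = ( f_up(alpha0) - 1,
          f_dn(-alpha0) - 1,
          f_up'(alpha0),
          f_dn'(-alpha0),
          f_up''(alpha0),
          f_dn''(-alpha0) )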
Example #19
    def __init__(self, pdfconfig, batch_size=None):
        default_backend = pyhf.default_backend

        self.batch_size = batch_size
        # iterate over all constraints; order doesn't matter

        self.data_indices = list(range(len(pdfconfig.auxdata)))
        self.parsets = [
            pdfconfig.param_set(cname) for cname in pdfconfig.auxdata_order
        ]

        pars_constrained_by_normal = [
            constrained_parameter
            for constrained_parameter in pdfconfig.auxdata_order
            if pdfconfig.param_set(constrained_parameter).pdf_type == 'normal'
        ]

        parfield_shape = (self.batch_size or 1, pdfconfig.npars)
        self.param_viewer = ParamViewer(parfield_shape, pdfconfig.par_map,
                                        pars_constrained_by_normal)

        start_index = 0
        normal_constraint_data = []
        normal_constraint_sigmas = []
        # loop over parameters (in auxdata order) and collect
        # the means / sigmas of the constraint terms as well as the data;
        # skip parsets that are not constrained by a normal
        for parset in self.parsets:
            end_index = start_index + parset.n_parameters
            thisauxdata = self.data_indices[start_index:end_index]
            start_index = end_index
            if not parset.pdf_type == 'normal':
                continue

            normal_constraint_data.append(thisauxdata)

            # many constraints are defined on a unit Gaussian,
            # but we reserve the possibility that a paramset
            # can define non-standard uncertainties. This is used
            # by the paramset associated with staterror modifiers.
            # Such parsets define a 'sigmas' attribute.
            try:
                normal_constraint_sigmas.append(parset.sigmas)
            except AttributeError:
                normal_constraint_sigmas.append([1.0] * len(thisauxdata))

        self._normal_data = None
        self._sigmas = None
        self._access_field = None
        # if this constraint term is used at all (non-zero index selection),
        # start preparing constant tensors
        if self.param_viewer.index_selection:
            self._normal_data = default_backend.astensor(
                default_backend.concatenate(normal_constraint_data),
                dtype='int')

            _normal_sigmas = default_backend.concatenate(
                normal_constraint_sigmas)
            if self.batch_size:
                sigmas = default_backend.reshape(_normal_sigmas, (1, -1))
                self._sigmas = default_backend.tile(sigmas,
                                                    (self.batch_size, 1))
            else:
                self._sigmas = _normal_sigmas

            access_field = default_backend.concatenate(
                self.param_viewer.index_selection, axis=1)
            self._access_field = access_field

        self._precompute()
        events.subscribe('tensorlib_changed')(self._precompute)