Example No. 1
    def _weight_samples(factor: "Factor",
                        samples: Dict[str, np.ndarray],
                        det_vars: Dict[str, np.ndarray],
                        log_factor: np.ndarray,
                        cavity_dist: Dict[str, AbstractMessage],
                        deterministic_dist: Dict[str, AbstractMessage],
                        proposal_dist: Dict[str, AbstractMessage],
                        n_samples: int) -> SamplingResult:

        # log density of the cavity and deterministic distributions,
        # evaluated at the drawn samples and deterministic values
        log_measure = 0.
        for res in chain(map_dists(cavity_dist, samples),
                         map_dists(deterministic_dist, det_vars)):
            log_measure = add_arrays(log_measure,
                                     factor.broadcast_variable(*res))

        # log density of the proposal distribution the samples were drawn from
        log_propose = 0.
        for res in map_dists(proposal_dist, samples):
            log_propose = add_arrays(log_propose,
                                     factor.broadcast_variable(*res))

        # importance log-weights: target measure relative to the proposal
        log_weights = log_factor + log_measure - log_propose

        assert np.isfinite(log_weights).all()

        return SamplingResult(samples=samples,
                              det_variables=det_vars,
                              log_weights=log_weights,
                              log_factor=log_factor,
                              log_measure=log_measure,
                              log_propose=log_propose,
                              n_samples=n_samples)
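The weighting step above is self-normalised importance sampling in log space: samples drawn from the proposal are reweighted by the ratio of the target measure (the factor times the cavity and deterministic distributions) to the proposal density. Below is a minimal stand-alone sketch of the same computation with plain NumPy/SciPy, using illustrative Gaussian densities rather than the library's message objects.

    import numpy as np
    from scipy import stats

    rng = np.random.default_rng(0)
    n_samples = 1000

    # draw from the proposal q(x) and evaluate the three log densities
    samples = rng.normal(loc=0.0, scale=1.0, size=n_samples)
    log_factor = stats.norm(1.0, 0.5).logpdf(samples)    # stand-in for the factor
    log_measure = stats.norm(0.0, 2.0).logpdf(samples)   # stand-in for the cavity density
    log_propose = stats.norm(0.0, 1.0).logpdf(samples)   # proposal density

    # importance log-weights, exactly as in the method above
    log_weights = log_factor + log_measure - log_propose
    assert np.isfinite(log_weights).all()

    # self-normalised weights give expectations under the target measure
    weights = np.exp(log_weights - log_weights.max())
    weights /= weights.sum()
    target_mean = np.sum(weights * samples)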
Example No. 2
    def __call__(
        self,
        values: Dict[Variable, np.ndarray],
        axis: Axis = False,
    ) -> FactorValue:
        # evaluate the factor, then add the cavity distribution's log density
        # over the passed values and the factor's deterministic outputs
        fval = self.factor(values, axis=axis)
        log_meanfield = self.cavity_dist(
            {**values, **fval.deterministic_values}, axis=axis)
        return add_arrays(fval, log_meanfield)
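This method sums the factor's own log value with the cavity distribution's log density, evaluated at both the inputs and the factor's deterministic outputs (the tilted distribution used in expectation propagation). A toy sketch of the same composition, with hypothetical stand-ins for the factor and cavity rather than the library's API:

    import numpy as np
    from scipy import stats

    def factor_call(values):
        # toy factor: Gaussian log-likelihood plus one deterministic output
        x = values["x"]
        return stats.norm(0.0, 1.0).logpdf(x), {"x_squared": x ** 2}

    def cavity_logpdf(values):
        # cavity (mean-field) log density over free and deterministic variables
        return (stats.norm(0.0, 2.0).logpdf(values["x"])
                + stats.norm(1.0, 1.0).logpdf(values["x_squared"]))

    values = {"x": np.array([0.5, -0.3])}
    fval, det_values = factor_call(values)
    log_tilted = fval + cavity_logpdf({**values, **det_values})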
Example No. 3
    def logpdf_gradient(self,
                        values: Dict[Variable, np.ndarray],
                        axis: Axis = False,
                        **kwargs):
        logl = 0.
        gradl = {}
        # accumulate the log density and per-variable gradient of each
        # independent (mean-field) marginal, broadcast over its plates
        for v, m in self.items():
            lv, gradl[v] = m.logpdf_gradient(values[v])
            lv = aggregate(self._broadcast(self._variable_plates[v], lv),
                           axis=axis)
            logl = add_arrays(logl, lv)

        return logl, gradl
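The pattern relies on the mean-field factorisation: because the marginals are independent, the joint log density is the sum of per-variable log densities, and the gradient splits into a separate block per variable. A self-contained sketch with Gaussian marginals; the helper below is an illustrative stand-in for AbstractMessage.logpdf_gradient, not the library's implementation.

    import numpy as np
    from scipy import stats

    def norm_logpdf_gradient(dist, x):
        # log density of a Gaussian marginal and its gradient w.r.t. x
        return dist.logpdf(x), -(x - dist.mean()) / dist.var()

    mean_field = {"x": stats.norm(0.0, 1.0), "y": stats.norm(2.0, 0.5)}
    values = {"x": np.array([0.3, -0.1]), "y": np.array([1.8, 2.2])}

    logl = 0.0
    gradl = {}
    for v, m in mean_field.items():
        lv, gradl[v] = norm_logpdf_gradient(m, values[v])
        logl += lv.sum()    # aggregate over the variable's plate dimensions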
Example No. 4
    def __call__(
        self,
        variable_dict: Dict[Variable, np.ndarray],
        axis: Axis = False,
    ) -> FactorValue:
        """
        Call each function in the graph in the correct order, adding the logarithmic results.

        Deterministic values computed in initial factor calls are added to a dictionary and
        passed to subsequent factor calls.

        Parameters
        ----------
        variable_dict
            Positional arguments
        axis
            Keyword arguments

        Returns
        -------
        Object comprising the log value of the computation and a dictionary containing
        the values of deterministic variables.
        """

        # the factors are called in groups, ordered so that the deterministic
        # variables a factor needs have been computed by earlier calls
        log_value = 0.
        det_values = {}
        variables = variable_dict.copy()

        missing = {v.name for v in self.variables}.difference(
            v.name for v in variables)
        if missing:
            n_miss = len(missing)
            missing_str = ", ".join(missing)
            raise ValueError(
                f"{self} missing {n_miss} arguments: {missing_str}\n"
                f"factor graph call signature: {self.call_signature}")

        for calls in self._call_sequence:
            # TODO parallelise this part?
            for factor in calls:
                ret = factor(variables)
                ret_value = self.broadcast_plates(factor.plates, ret.log_value)
                log_value = add_arrays(log_value, aggregate(ret_value, axis))
                det_values.update(ret.deterministic_values)
                variables.update(ret.deterministic_values)

        return FactorValue(log_value, det_values)
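A minimal stand-alone sketch of the call sequence described in the docstring, assuming a graph with one deterministic factor feeding a likelihood factor; the factor functions and variable names here are illustrative, not the library's API.

    import numpy as np

    def linear_model(variables):
        # produces the deterministic variable "mu" from the free parameters
        mu = variables["a"] * variables["t"] + variables["b"]
        return 0.0, {"mu": mu}

    def likelihood(variables):
        # consumes "mu", which was computed by the earlier factor call
        resid = variables["x"] - variables["mu"]
        return -0.5 * np.sum(resid ** 2), {}

    # groups of factors ordered so each group's inputs are already available
    call_sequence = [[linear_model], [likelihood]]

    variables = {"a": 1.5, "b": 0.2,
                 "t": np.linspace(0.0, 1.0, 5), "x": np.zeros(5)}
    log_value, det_values = 0.0, {}
    for calls in call_sequence:
        for factor in calls:
            lv, det = factor(variables)
            log_value += lv
            det_values.update(det)
            variables.update(det)   # later factors see the deterministic values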
Example No. 5
    def logpdf_gradient_hessian(
            self,
            values: Dict[Variable, np.ndarray],
            axis: Optional[Union[bool, int, Tuple[int, ...]]] = False,
            **kwargs):
        logl = 0.
        gradl = {}
        hessl = {}
        # accumulate the log density, gradient and Hessian of each
        # independent (mean-field) marginal, broadcast over its plates
        for v, m in self.items():
            lv, gradl[v], hessl[v] = m.logpdf_gradient_hessian(values[v])
            lv = aggregate(self._broadcast(self._variable_plates[v], lv),
                           axis=axis)
            logl = add_arrays(logl, lv)

        return logl, gradl, hessl
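The Hessian variant above extends the gradient accumulation to second derivatives; under the mean-field factorisation the Hessian is block diagonal, with one block per variable. A sketch continuing the Gaussian stand-in from the earlier example, where the Hessian of each marginal's log density is the constant -1/variance:

    import numpy as np
    from scipy import stats

    def norm_logpdf_gradient_hessian(dist, x):
        # log density, gradient and (diagonal) Hessian of a Gaussian marginal
        grad = -(x - dist.mean()) / dist.var()
        hess = np.full_like(x, -1.0 / dist.var())
        return dist.logpdf(x), grad, hess

    mean_field = {"x": stats.norm(0.0, 1.0), "y": stats.norm(2.0, 0.5)}
    values = {"x": np.array([0.3, -0.1]), "y": np.array([1.8, 2.2])}

    logl, gradl, hessl = 0.0, {}, {}
    for v, m in mean_field.items():
        lv, gradl[v], hessl[v] = norm_logpdf_gradient_hessian(m, values[v])
        logl += lv.sum()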