def func_jacobian(
        self,
        variable_dict: Dict[Variable, np.ndarray],
        variables: Optional[Tuple[Variable, ...]] = None,
        axis: Axis = False,
        **kwargs,
) -> Tuple[FactorValue, JacobianValue]:
    """
    Call this factor and compute its Jacobians with respect to
    the requested variables.

    Parameters
    ----------
    variable_dict
        Mapping from each input Variable to its value
    variables
        The variables to differentiate with respect to; if None,
        differentiates with respect to all of ``self.variables``
    axis
        Axes to reduce the (zero) log-value over; if None the result
        is reduced over all dimensions, if False it is not reduced

    Returns
    -------
    A (FactorValue, JacobianValue) pair: the factor value with its
    deterministic outputs, and per-variable FactorValue Jacobians
    """
    if variables is None:
        variables = self.variables
    # Translate Variable objects into the factor's keyword-argument names
    variable_names = tuple(self._variable_name_kw[v.name] for v in variables)
    kwargs = self.resolve_variable_dict(variable_dict)
    # First result is the factor's output(s); the rest are Jacobians,
    # one group per deterministic output
    vals, *jacs = self._call_factor(kwargs, variables=variable_names)
    # shift = number of leading (non-plate) dimensions; trailing
    # dimensions of `shape` line up with self.plates
    shift, shape = self._function_shape(**kwargs)
    plate_dim = dict(zip(self.plates, shape[shift:]))
    # Target shape for each deterministic output, built from its plates
    det_shapes = {
        v: shape[:shift] + tuple(plate_dim[p] for p in v.plates)
        for v in self.deterministic_variables
    }
    var_shapes = {self._kwargs[v]: np.shape(x) for v, x in kwargs.items()}
    # Normalise a single bare output into a 1-tuple so zipping below works
    if not (isinstance(vals, tuple) or self.n_deterministic > 1):
        vals = vals,
    # Deterministic factors contribute zero log-density; shape it to
    # broadcast correctly unless there is nothing to reduce
    log_val = (
        0. if (shape == () or axis is None)
        else aggregate(np.zeros(tuple(1 for _ in shape)), axis))
    det_vals = {
        k: np.reshape(val, det_shapes[k]) if det_shapes[k] else val
        for k, val in zip(self._deterministic_variables, vals)
    }
    fval = FactorValue(log_val, det_vals)
    # Collect, per input variable, the Jacobian of every deterministic
    # output with respect to that variable
    vjacs = {}
    for k, _jacs in zip(self._deterministic_variables, jacs):
        for v, jac in zip(variables, _jacs):
            vjacs.setdefault(v, {})[k] = np.reshape(
                jac, det_shapes[k] + var_shapes[v][shift:])
    # The log-value gradient is identically zero; the deterministic
    # Jacobians are carried in the FactorValue's deterministic dict
    fjac = {
        v: FactorValue(np.zeros(np.shape(log_val) + var_shapes[v]), vjacs[v])
        for v in variables
    }
    return fval, fjac
def __call__(
        self,
        variable_dict: Dict[Variable, np.ndarray],
        axis: Axis = False,
) -> FactorValue:
    """
    Evaluate the underlying factor for the given variable values.

    Parameters
    ----------
    variable_dict
        Mapping from each input Variable to its value
    axis
        Axes to reduce the result over; False leaves it unreduced

    Returns
    -------
    FactorValue wrapping the (possibly reduced) factor output, with
    no deterministic values
    """
    resolved = self.resolve_variable_dict(variable_dict)
    factor_output = self._call_factor(resolved, variables=None)
    return FactorValue(aggregate(factor_output, axis), {})
def func_jacobian(
        self,
        variable_dict: Dict[Variable, np.ndarray],
        variables: Optional[Tuple[Variable, ...]] = None,
        axis: Axis = False,
        **kwargs,
) -> Tuple[FactorValue, JacobianValue]:
    """
    Evaluate the underlying factor together with its gradients.

    Parameters
    ----------
    variable_dict
        The values to call the function with
    variables
        The variables to calculate gradients and Jacobians for;
        if None, differentiates with respect to all variables
    axis
        The axes to reduce the result over: None sums over all
        dimensions, False leaves the result unreduced

    Returns
    -------
    FactorValue, JacobianValue
        The function value and its per-variable gradients
    """
    if variables is None:
        variables = self.variables
    names = tuple(self._variable_name_kw[v.name] for v in variables)
    resolved = self.resolve_variable_dict(variable_dict)
    value, jacobians = self._call_factor(resolved, variables=names)
    # axis=None means: reduce gradients over every dimension of the value
    if axis is None:
        grad_axis = tuple(range(np.ndim(value)))
    else:
        grad_axis = axis
    fval = FactorValue(aggregate(self._reshape_factor(value, resolved), axis))
    fjac = {}
    for var, jac in zip(variables, jacobians):
        fjac[var] = FactorValue(aggregate(jac, grad_axis))
    return fval, fjac
def logpdf_gradient(self,
                    values: Dict[Variable, np.ndarray],
                    axis: Axis = False,
                    **kwargs):
    """
    Compute the total logpdf of the passed values and, per variable,
    the gradient of that logpdf.

    Each message's contribution is broadcast to the variable's plates
    and reduced over `axis` before being added to the running total.

    Returns
    -------
    (logpdf, gradients) where gradients maps each variable to the
    gradient returned by its message
    """
    total = 0
    gradients = {}
    for variable, message in self.items():
        term, gradients[variable] = message.logpdf_gradient(values[variable])
        broadcast_term = self._broadcast(self._variable_plates[variable], term)
        total = add_arrays(total, aggregate(broadcast_term, axis=axis))
    return total, gradients
def __call__(
        self,
        variable_dict: Dict[Variable, np.ndarray],
        axis: Axis = False,
) -> FactorValue:
    """
    Call each factor in the graph in the correct order, adding the
    logarithmic results.

    Deterministic values computed in initial factor calls are added
    to a dictionary and passed to subsequent factor calls.

    Parameters
    ----------
    variable_dict
        Mapping from each input Variable to its value
    axis
        Axes to reduce each factor's log value over before summing

    Returns
    -------
    Object comprising the log value of the computation and a
    dictionary containing the values of deterministic variables.

    Raises
    ------
    ValueError
        If any variable required by the graph is missing from
        `variable_dict`.
    """
    # generate set of factors to call, these are indexed by the
    # missing deterministic variables that need to be calculated
    log_value = 0.
    det_values = {}
    variables = variable_dict.copy()
    missing = set(v.name for v in self.variables).difference(
        v.name for v in variables)
    if missing:
        n_miss = len(missing)
        missing_str = ", ".join(missing)
        # BUG FIX: the two f-strings were concatenated with no
        # separator, fusing the variable list into "factor graph"
        raise ValueError(
            f"{self} missing {n_miss} arguments: {missing_str}\n"
            f"factor graph call signature: {self.call_signature}")

    for calls in self._call_sequence:
        # TODO parallelise this part?
        for factor in calls:
            ret = factor(variables)
            # broadcast each factor's log value to the graph's plates
            # before reducing and accumulating
            ret_value = self.broadcast_plates(factor.plates, ret.log_value)
            log_value = add_arrays(log_value, aggregate(ret_value, axis))
            det_values.update(ret.deterministic_values)
            # deterministic outputs feed later factor calls
            variables.update(ret.deterministic_values)

    return FactorValue(log_value, det_values)
def logpdf(
        self,
        values: Dict[Variable, np.ndarray],
        axis: Axis = False,
) -> np.ndarray:
    """
    Calculate the logpdf of the passed values for the messages.

    Each message's contribution is broadcast to the appropriate shape
    given the variable plates, reduced over `axis`, and summed.
    """
    contributions = (
        aggregate(
            self._broadcast(self._variable_plates[variable],
                            message.logpdf(values[variable])),
            axis=axis)
        for variable, message in self.items()
    )
    return reduce(add_arrays, contributions)
def logpdf_gradient_hessian(self,
                            values: Dict[Variable, np.ndarray],
                            axis: Axis = False,
                            **kwargs):
    """
    Compute the total logpdf of the passed values and, per variable,
    the gradient and Hessian of that logpdf.

    Each message's log contribution is broadcast to the variable's
    plates and reduced over `axis` before being added to the total.

    Parameters
    ----------
    values
        Mapping from each Variable to its value
    axis
        Axes to reduce each contribution over; False leaves them
        unreduced, None reduces over all dimensions

    Returns
    -------
    (logpdf, gradients, hessians) where gradients and hessians map
    each variable to the values returned by its message
    """
    # CONSISTENCY FIX: `axis` previously spelled out
    # Optional[Union[bool, int, Tuple[int, ...]]] where every sibling
    # method uses the Axis alias; annotation only, behavior unchanged.
    logl = 0.
    gradl = {}
    hessl = {}
    for v, m in self.items():
        lv, gradl[v], hessl[v] = m.logpdf_gradient_hessian(values[v])
        lv = aggregate(self._broadcast(self._variable_plates[v], lv),
                       axis=axis)
        logl = add_arrays(logl, lv)
    return logl, gradl, hessl
def __call__(
        self,
        variable_dict: Dict[Variable, np.ndarray],
        axis: Axis = False,
        # **kwargs: np.ndarray
) -> FactorValue:
    """
    Call this factor with a set of arguments.

    Parameters
    ----------
    variable_dict
        Mapping from each input Variable to its value
    axis
        Axes to reduce the (zero) log value over; if None the result
        is reduced over all dimensions, if False it is not reduced

    Returns
    -------
    An object encapsulating the value for the factor, with the
    factor's outputs stored as deterministic values
    """
    kwargs = self.resolve_variable_dict(variable_dict)
    res = self._call_factor(**kwargs)
    # shift = number of leading (non-plate) dimensions; trailing
    # dimensions of `shape` line up with self.plates
    shift, shape = self._function_shape(**kwargs)
    plate_dim = dict(zip(self.plates, shape[shift:]))
    # Target shape for each deterministic output, built from its plates
    det_shapes = {
        v: shape[:shift] + tuple(
            plate_dim[p] for p in v.plates)
        for v in self.deterministic_variables
    }
    # Normalise a single bare output into a 1-tuple so zipping below works
    if not (isinstance(res, tuple) or self.n_deterministic > 1):
        res = res,
    # Deterministic factors contribute zero log-density; shape it to
    # broadcast correctly unless there is nothing to reduce
    log_val = (
        0. if (shape == () or axis is None)
        else aggregate(np.zeros(tuple(1 for _ in shape)), axis))
    det_vals = {
        k: np.reshape(val, det_shapes[k]) if det_shapes[k] else val
        for k, val in zip(self._deterministic_variables, res)
    }
    return FactorValue(log_val, det_vals)
def __call__(
        self,
        variable_dict: Dict[Variable, np.ndarray],
        axis: Axis = False,
) -> FactorValue:
    """
    Evaluate the underlying factor for the given variable values.

    Parameters
    ----------
    variable_dict
        Mapping from each input Variable to its value
    axis
        Axes to reduce the result over; False leaves it unreduced

    Returns
    -------
    FactorValue encapsulating the reshaped, reduced result of the
    function call, with no deterministic values
    """
    resolved = self.resolve_variable_dict(variable_dict)
    raw_value = self._call_factor(**resolved)
    reshaped = self._reshape_factor(raw_value, resolved)
    return FactorValue(aggregate(reshaped, axis), {})