예제 #1
0
    def calculate_log_probability(self, input_values, reevaluate=True, for_gradient=False,
                                  include_parents=True, normalized=True):
        """
        Compute the log probability of the given input values under this variable's
        distribution, recursively accumulating the log probabilities of its parents.

        Args:
            input_values: Dictionary(brancher.Variable: torch.Tensor). Maps the
                model's variables to their values. Has to provide values for all
                non-deterministic variables of the model.

            reevaluate: Bool. If False and this variable was already evaluated,
                return 0. immediately so the same term is not counted twice when
                multiple children ask for the log probability of a shared parent.

            for_gradient: Bool. Forwarded unchanged to the parents' recursive calls;
                presumably selects a gradient-friendly evaluation path — TODO confirm
                against the parents' implementations.

            include_parents: Bool. If False, return only this variable's own log
                probability, excluding the parents' contribution.

            normalized: Bool. Forwarded unchanged to the parents' recursive calls;
                presumably controls inclusion of normalization constants — TODO confirm.

        Returns:
            torch.Tensor (or the float 0. on a cached short-circuit). The log
            probability of the input values given the model.
        """
        if self._evaluated and not reevaluate:
            return 0.
        # Resolve this variable's own value first, then mark it evaluated so later
        # traversals with reevaluate=False do not double-count it.
        value = self._get_its_own_value_from_input(input_values, reevaluate)
        self._evaluated = True
        parameters_dict = self._get_parameters_from_input_values(input_values)
        log_probability = self.distribution.calculate_log_probability(value, **parameters_dict)
        # Parents already visited in this traversal contribute 0. via the cache
        # check above, so shared ancestors are counted once.
        parents_log_probability = sum([parent.calculate_log_probability(input_values, reevaluate, for_gradient,
                                                                        normalized=normalized)
                                       for parent in self.parents])
        if self.is_observed:
            # Collapse dim 1 for observed variables — assumes dim 1 indexes the
            # observed datapoints; TODO confirm.
            log_probability = log_probability.sum(dim=1, keepdim=True)
        if is_tensor(log_probability) and is_tensor(parents_log_probability):
            log_probability, parents_log_probability = partial_broadcast(log_probability, parents_log_probability)
        if include_parents:
            return log_probability + parents_log_probability
        else:
            return log_probability
예제 #2
0
    def calculate_log_probability(self, input_values, reevaluate=True):
        """
        Compute the log probability of the supplied values under this variable's
        distribution and recursively add the log probability of its parents.

        Args:
            input_values: Dictionary(brancher.Variable: chainer.Variable). Maps the
                model's variables to their values; has to cover every
                non-deterministic variable of the model.

            reevaluate: Bool. If False, a variable that was already evaluated
                contributes 0. so shared parents are not counted more than once.

        Returns:
            chainer.Variable (or the float 0. on a cached short-circuit). The log
            probability of the input values given the model.
        """
        if not reevaluate and self._evaluated:
            return 0.
        value = input_values[self] if self in input_values else self.value
        self._evaluated = True
        # Collect parent values: entries supplied in input_values first, then the
        # stored values of deterministic parents (which win on key collisions).
        parents_values = {}
        for parent, supplied_value in input_values.items():
            if parent in self.parents:
                parents_values[parent] = supplied_value
        for parent in self.parents:
            if type(parent) is DeterministicVariable:
                parents_values[parent] = parent.value
        parameters_dict = self._apply_link(parents_values)
        own_log_probability = self.distribution.calculate_log_probability(
            value, **parameters_dict)
        ancestors_log_probability = sum(
            parent.calculate_log_probability(input_values, reevaluate)
            for parent in self.parents)
        if self.is_observed:
            # Sum over axis 1 for observed variables before combining with parents.
            own_log_probability = F.sum(own_log_probability, axis=1, keepdims=True)
        both_are_variables = (type(own_log_probability) is chainer.Variable
                              and type(ancestors_log_probability) is chainer.Variable)
        if both_are_variables:
            own_log_probability, ancestors_log_probability = partial_broadcast(
                own_log_probability, ancestors_log_probability)
        return own_log_probability + ancestors_log_probability
예제 #3
0
    def calculate_log_probability(self, input_values, reevaluate=True):
        """
        Compute the log probability of the given values under this variable's
        distribution, recursively adding the log probability of its parents.

        Args:
            input_values: Dictionary mapping the model's brancher variables to
                their values (presumably chainer.Variable values — TODO confirm).
                Has to provide a value for every non-deterministic variable.

            reevaluate: Bool. If False, a variable that was already evaluated
                contributes 0. so shared parents are not double-counted.

        Returns:
            chainer.Variable (or the float 0. on a cached short-circuit). The log
            probability of the input values given the model.
        """
        if self._evaluated and not reevaluate:
            return 0.
        if self in input_values:
            value = input_values[self]
        else:
            value = self.value

        self._evaluated = True
        # Deterministic parents use their stored value; merged last below, they
        # override any entry supplied for them in input_values.
        deterministic_parents_values = {
            parent: parent.value
            for parent in self.parents
            if (type(parent) is DeterministicVariable)
        }
        parents_input_values = {
            parent: parent_input
            for parent, parent_input in input_values.items()
            if parent in self.parents
        }
        parents_values = {
            **parents_input_values,
            **deterministic_parents_values
        }
        parameters_dict = self.apply_link(parents_values)
        log_probability = self.distribution.calculate_log_probability(
            value, **parameters_dict)
        # Parents already visited contribute 0. via the cache check above.
        parents_log_probability = sum([
            parent.calculate_log_probability(input_values, reevaluate)
            for parent in self.parents
        ])
        if self.is_observed:
            # Sum over axis 1 for observed variables — assumes axis 1 indexes the
            # observed datapoints; TODO confirm.
            log_probability = F.sum(log_probability, axis=1, keepdims=True)
        if type(log_probability) is chainer.Variable and type(
                parents_log_probability) is chainer.Variable:
            log_probability, parents_log_probability = partial_broadcast(
                log_probability, parents_log_probability)
        return log_probability + parents_log_probability
예제 #4
0
# NOTE(review): this chunk begins mid-script — the xt and xc used on the next line
# are defined above the visible region. Each section builds the same data with
# torch and chainer and cross-checks the two utility implementations.
equal_tensor_variable(xt, xc)

## sum_from_dim: torch sum
# Sum a (20, 5, 4, 2) array from dim_index onward with both backends and compare.
x = np.random.normal(size=(20, 5, 4, 2))
dim_index = 1
xt = utilities.sum_from_dim(torch.tensor(x), dim_index)
xc = utilities.sum_from_dim_chainer(chainer.Variable(x), dim_index)

equal_tensor_variable(xt, xc)

## partial_broadcast
# Two arrays differing only in dim 1 (sizes 1 and 2) so partial broadcasting applies.
xl = []
for i in range(1, 3):
    xl.append(np.random.normal(size=(20, i, 10, 3)))

xt = utilities.partial_broadcast(*[torch.tensor(x) for x in xl])
xc = utilities.partial_broadcast_chainer(*[chainer.Variable(x) for x in xl])

# Shapes are printed for manual inspection rather than asserted.
print([i.shape for i in xl])
print([i.numpy().shape for i in xt])
print([i.shape for i in xc])

## broadcast_and_squeeze
tensor_list = [torch.randn(10, 5), torch.randn(10, 5), torch.randn(10, 5)]
xt = utilities.broadcast_and_squeeze(*tensor_list)
# NOTE(review): the chainer counterpart is commented out — presumably not
# implemented yet; confirm before re-enabling.
#xc = utilities.broadcast_and_squeeze_chainer(*[chainer.Variable(x.numpy()) for x in tensor_list])

print([i.numpy().shape for i in xt])
#print([i.shape for i in xc])

# no torch.expand, no torch.repeat, no torch.view