def t_student_log_pdf_tf(mean_matrix, chol_cov_matrices, data_point, nus,
                         cluster_counts):
    """Per-cluster multivariate Student-t log-density of a single data point.

    `mean_matrix` holds one location per cluster, `chol_cov_matrices` the
    lower-triangular Cholesky factors of the (unscaled) cluster scale
    matrices, `nus` the degrees of freedom and `cluster_counts` the number
    of points currently assigned to each cluster.
    """
    data_dim = tf.constant(data_point.get_shape()[0].value, dtype=tf.float32)
    kappas = tf.constant(init_kappa_0()) + cluster_counts

    # Scale each Cholesky factor by sqrt((kappa + 1) / (kappa * nu)).
    scale_fact = tf.expand_dims(tf.expand_dims((kappas + 1.) / (kappas * nus),
                                               axis=-1),
                                axis=-1)
    chol_cov_scaled = tf.sqrt(scale_fact) * chol_cov_matrices

    # 0.5 * log|Sigma| from the diagonal of each scaled Cholesky factor.
    chol_cov_diagonals = tf.matrix_diag_part(chol_cov_scaled)
    log_dets_sqrt = tf.reduce_sum(tf.log(chol_cov_diagonals), axis=-1)

    # Whitened residual L^{-1} (x - mu); its norm is the Mahalanobis distance.
    data_point_norm = tf.expand_dims(data_point, axis=0) - mean_matrix
    vecs = tf.squeeze(
        tf.linalg.triangular_solve(chol_cov_scaled,
                                   tf.expand_dims(data_point_norm, axis=-1),
                                   lower=True))
    vecs_norm = tf.norm(vecs, axis=-1)

    # Log-numerator and log-denominator of the Student-t density.
    num = tf.math.lgamma((nus + data_dim) / 2.)

    denom = tf.math.lgamma(
        nus / 2.) + (data_dim / 2.) * (tf.log(nus) + np.log(np.pi))
    denom += log_dets_sqrt
    denom += (
        (nus + data_dim) / 2.) * math.log1psquare(vecs_norm / tf.sqrt(nus))

    return num - denom
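The snippet above relies on helpers from its enclosing project (`init_kappa_0`, and a `math` module that exports `log1psquare`) plus the usual `tf`/`np` imports. The key numerical trick is `log1psquare`, a stable evaluation of log(1 + x**2). A minimal, self-contained sketch of that behaviour, assuming a TensorFlow Probability release that exports `tfp.math.log1psquare` (the test values are arbitrary):

import tensorflow as tf
import tensorflow_probability as tfp

x = tf.constant([0.0, 1e-4, 3.0, 1e30])
# Stable log(1 + x**2): switches to 2 * log|x| once x**2 would overflow
# or completely dominate the 1.
stable = tfp.math.log1psquare(x)
# Naive form: rounds the 1e-4 entry away and overflows to inf at 1e30.
naive = tf.math.log(1. + tf.square(x))
print(stable.numpy())
print(naive.numpy())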
Example #2
  def _log_unnormalized_prob(self, value):
    # Whiten the residual with the scale linear operator: z = scale^{-1}(x - loc).
    value = value - self._loc
    value = self.scale.solve(value[..., tf.newaxis])

    num_dims = tf.cast(self.event_shape_tensor()[0], self.dtype)
    # The Mahalanobis distance is the Euclidean norm of the whitened residual.
    mahalanobis = tf.norm(value, axis=[-1, -2])
    # Student-t kernel: -(d + df) / 2 * log(1 + m**2 / df), evaluated stably.
    return -(num_dims + self.df) / 2. * math.log1psquare(
        mahalanobis / tf.sqrt(self.df))
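This method computes only the Student-t kernel; the full log-density adds a normalising constant. It matches the pattern of TFP's `MultivariateStudentTLinearOperator` distribution, whose public API can be exercised directly. A short usage sketch with arbitrary parameters, assuming `tensorflow_probability` is importable as `tfp`:

import tensorflow as tf
import tensorflow_probability as tfp

tfd = tfp.distributions

# 2-D Student-t with 5 degrees of freedom; the scale is a LinearOperator
# wrapping a lower-triangular Cholesky factor, as the method above expects.
scale = tf.linalg.LinearOperatorLowerTriangular([[1.0, 0.0], [0.5, 2.0]],
                                                is_non_singular=True)
dist = tfd.MultivariateStudentTLinearOperator(df=5., loc=[1., -1.], scale=scale)

x = tf.constant([0.3, 0.7])
# log_prob combines the unnormalized kernel with the log-normalizer.
print(dist.log_prob(x).numpy())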
Example #3
    def _forward_log_det_jacobian(self, x):
        # y = sinh((arcsinh(x) + skewness) * tailweight) * multiplier
        # Using sinh' = cosh, arcsinh'(x) = 1 / sqrt(x**2 + 1),
        # dy/dx
        # = cosh((arcsinh(x) + skewness) * tailweight) * tailweight / sqrt(x**2 + 1)
        # * multiplier

        tailweight = tf.convert_to_tensor(self.tailweight)

        return (tfp_math.log_cosh((tf.asinh(x) + self.skewness) * tailweight) -
                0.5 * tfp_math.log1psquare(x) + tf.math.log(tailweight) +
                tf.math.log(self._output_multiplier(tailweight)))
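Because the transform is elementwise, the analytic log-determinant above can be cross-checked against automatic differentiation of `forward`. A minimal sketch, assuming a TFP version that ships the `SinhArcsinh` bijector with `skewness`/`tailweight` arguments (the parameter values are arbitrary):

import tensorflow as tf
import tensorflow_probability as tfp

bij = tfp.bijectors.SinhArcsinh(skewness=0.5, tailweight=1.5)

x = tf.constant([-2.0, 0.0, 3.0])
with tf.GradientTape() as tape:
    tape.watch(x)
    y = bij.forward(x)

# Elementwise transform, so the gradient of sum(y) w.r.t. x is dy/dx per element.
dydx = tape.gradient(y, x)
analytic = bij.forward_log_det_jacobian(x, event_ndims=0)
print(tf.reduce_max(tf.abs(tf.math.log(dydx) - analytic)).numpy())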
Example #4
    def _inverse_log_det_jacobian(self, y):
        # x = sinh(arcsinh(y / multiplier) / tailweight - skewness)
        # Using sinh' = cosh, arcsinh'(y) = 1 / sqrt(y**2 + 1),
        # dx/dy
        # = cosh(arcsinh(y / multiplier) / tailweight - skewness)
        #     / (tailweight * sqrt((y / multiplier)**2 + 1)) / multiplier

        tailweight = tf.convert_to_tensor(self.tailweight)
        multiplier = self._output_multiplier(tailweight)
        y = y / multiplier

        return (tfp_math.log_cosh(tf.asinh(y) / tailweight - self.skewness) -
                0.5 * tfp_math.log1psquare(y) - tf.math.log(tailweight) -
                tf.math.log(multiplier))
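For any bijector, the forward log-determinant at `x` and the inverse log-determinant at `y = forward(x)` should cancel, which gives a cheap consistency check for the two methods above. A sketch under the same assumptions as before:

import tensorflow as tf
import tensorflow_probability as tfp

bij = tfp.bijectors.SinhArcsinh(skewness=-0.3, tailweight=2.0)

x = tf.constant([-1.5, 0.0, 0.25, 4.0])
y = bij.forward(x)

fldj = bij.forward_log_det_jacobian(x, event_ndims=0)
ildj = bij.inverse_log_det_jacobian(y, event_ndims=0)

# The two log-determinants are negatives of each other; the residual should
# sit at floating-point noise level.
print(tf.reduce_max(tf.abs(fldj + ildj)).numpy())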