Example 1
from tensorflow.keras import backend as K

EPSILON = K.epsilon()  # assumed here: the original module defines EPSILON elsewhere

def disc_mutual_info_loss(c_disc, aux_dist):
    """
    Mutual Information lower bound loss for a discrete latent code
    (InfoGAN-style): c_disc holds one-hot code samples, aux_dist the
    predicted categorical distribution Q(c|x).
    """
    # Number of categories of the discrete code.
    reg_disc_dim = aux_dist.get_shape().as_list()[-1]
    # Cross-entropy between the sampled code and the auxiliary prediction.
    cross_ent = -K.mean(K.sum(K.log(aux_dist + EPSILON) * c_disc, axis=1))
    # Entropy of the uniform categorical prior over the code.
    ent = -K.mean(K.sum(K.log(1. / reg_disc_dim + EPSILON) * c_disc, axis=1))

    return -(ent - cross_ent)  # negate the bound so minimizing maximizes MI
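A minimal eager sanity check, assuming the TensorFlow Keras backend; the tensors below are illustrative one-hot code samples and predicted distributions, not values from the original project:

c_disc = K.constant([[0., 1., 0.]])       # one-hot sample of the discrete code
aux_dist = K.constant([[0.1, 0.8, 0.1]])  # predicted Q(c|x); rows sum to 1
print(K.eval(disc_mutual_info_loss(c_disc, aux_dist)))  # ~ -0.876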
Example 2
from tensorflow.keras import backend as K

def poisson(y_true, y_pred):
  # Poisson NLL up to an additive constant; K.epsilon() keeps log() finite at 0.
  return K.mean(y_pred - y_true * K.log(y_pred + K.epsilon()), axis=-1)
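A quick eager evaluation (a sketch with made-up counts and rates, assuming the TensorFlow Keras backend):

y_true = K.constant([[1., 0., 2.]])     # observed counts
y_pred = K.constant([[0.8, 0.1, 1.9]])  # predicted rates (must be positive)
print(K.eval(poisson(y_true, y_pred)))  # one loss value per sample row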
Example 3
from tensorflow.keras import backend as K

def kullback_leibler_divergence(y_true, y_pred):
  # Clip both distributions into [epsilon, 1] so the log ratio stays finite.
  y_true = K.clip(y_true, K.epsilon(), 1)
  y_pred = K.clip(y_pred, K.epsilon(), 1)
  # KL(y_true || y_pred) = sum over the last axis of p * log(p / q).
  return K.sum(y_true * K.log(y_true / y_pred), axis=-1)
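A small eager check with illustrative distributions (assuming the TensorFlow Keras backend); both rows should sum to 1:

p = K.constant([[0.7, 0.2, 0.1]])  # reference distribution
q = K.constant([[0.5, 0.3, 0.2]])  # approximating distribution
print(K.eval(kullback_leibler_divergence(p, q)))  # >= 0; 0 only when p == q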
Example 4
from tensorflow.keras import backend as K

def mean_squared_logarithmic_error(y_true, y_pred):
  # Clip at epsilon so log() never sees negatives; +1 shifts zero to log(1)=0.
  first_log = K.log(K.clip(y_pred, K.epsilon(), None) + 1.)
  second_log = K.log(K.clip(y_true, K.epsilon(), None) + 1.)
  # Squared difference of the logs penalizes ratios rather than absolute gaps.
  return K.mean(K.square(first_log - second_log), axis=-1)
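An eager usage sketch with made-up targets (assuming the TensorFlow Keras backend); values that differ by a small ratio produce a small MSLE:

y_true = K.constant([[3., 5., 2.5]])
y_pred = K.constant([[2.5, 5., 3.]])
print(K.eval(mean_squared_logarithmic_error(y_true, y_pred)))  # small: logs are close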
Example 5
from tensorflow.keras import backend as K

def logcosh(y_true, y_pred):
    def cosh(x):
        # Direct definition; note K.exp(x) can overflow for very large |x|.
        return (K.exp(x) + K.exp(-x)) / 2

    # log(cosh(e)) ~ e**2 / 2 for small e and ~ |e| - log(2) for large e,
    # so the loss acts like MSE near zero and like MAE on outliers.
    return K.mean(K.log(cosh(y_pred - y_true)), axis=-1)
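A brief eager check with illustrative residuals (assuming the TensorFlow Keras backend):

y_true = K.constant([[0., 1., 2.]])
y_pred = K.constant([[0.1, 0.9, 2.5]])
print(K.eval(logcosh(y_true, y_pred)))  # close to half the MSE for small errors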