Example #1
import tensorflow as tf


def embedding_separation_loss(y_embeddings, name='emb_sep_L'):
    """
    Compute the embedding separation loss on a set of cluster embeddings.

    y_embeddings: shape (num_clusters, latent_dim) - y_embeddings[i] is the
    embedding of cluster i in the latent space.

    return: The embedding separation loss, which measures how well separated
    the clusters are in the latent space. Only the squared Euclidean (L2)
    distance is considered so far.
    """
    embedding_column = tf.expand_dims(y_embeddings,
                                      axis=1)  # shape (K, 1, latent_dim)
    embedding_row = tf.expand_dims(y_embeddings,
                                   axis=0)  # shape (1, K, latent_dim)

    # Pairwise squared L2 distances between all cluster embeddings.
    pairwise_loss = tf.reduce_sum(tf.math.squared_difference(embedding_column,
                                                             embedding_row),
                                  axis=-1)  # shape (K, K)
    # Negate so that minimizing the loss pushes the embeddings apart.
    loss = -tf.reduce_sum(pairwise_loss, axis=None, name=name)

    return loss
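A quick sanity check with made-up embeddings for K=3 clusters in a 2-dimensional latent space (hypothetical values, just to illustrate the sign convention):

import tensorflow as tf

y_embeddings = tf.constant([[0.0, 0.0],
                            [1.0, 0.0],
                            [0.0, 2.0]])
loss = embedding_separation_loss(y_embeddings)
print(loss.numpy())  # -20.0: each squared distance counted twice, then negated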
Example #2
import tensorflow as tf
from tensorflow import math

# Assumed class context; a full RMSE would override result() with a sqrt.
class RMSE(tf.keras.metrics.Mean):
    def update_state(self, y_true, y_pred, sample_weight=None):
        y_true = tf.cast(y_true, self._dtype)
        y_pred = tf.cast(y_pred, self._dtype)
        # Accumulate squared errors; the base Mean metric averages them.
        error = math.squared_difference(y_true, y_pred)
        return super(RMSE, self).update_state(error,
                                              sample_weight=sample_weight)
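Hedged usage sketch, assuming the class context above: feed batches through update_state() and read the running value from result(). As written, result() inherits the plain mean of the squared errors; the square root presumably lives elsewhere in the original class.

m = RMSE()
m.update_state(tf.constant([1.0, 2.0, 3.0]), tf.constant([1.0, 2.0, 4.0]))
print(m.result().numpy())  # (0 + 0 + 1) / 3 ≈ 0.333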
Example #3
from tensorflow import math


def mse(pred, label):
    # Mean squared error: the sum of squared differences divided by the
    # number of predictions (the mean for 1-D tensors).
    loss = math.squared_difference(label, pred)

    return math.reduce_sum(loss) / len(pred)
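A quick check with made-up 1-D tensors:

import tensorflow as tf

pred = tf.constant([1.0, 2.0, 3.0])
label = tf.constant([1.0, 2.0, 5.0])
print(mse(pred, label).numpy())  # (0 + 0 + 4) / 3 ≈ 1.333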
Example #4
from tensorflow import math as tfm


def nrmse(y_true, y_pred):
    # Normalized RMSE: the RMSE divided by the root mean square of y_true.
    return tfm.sqrt(
        tfm.reduce_mean(tfm.squared_difference(y_true, y_pred)) /
        tfm.reduce_mean(y_true**2))
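A quick check with made-up values: the squared errors average to 4/3 and the squared true values average to 12, so the result is sqrt((4/3) / 12) = 1/3.

import tensorflow as tf

y_true = tf.constant([2.0, 4.0, 4.0])
y_pred = tf.constant([2.0, 4.0, 2.0])
print(nrmse(y_true, y_pred).numpy())  # ≈ 0.333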