Example #1
def sparse_categorical_crossentropy(y_true, y_pred):
    # make the shape and dtype of y_true explicit
    y_true = K.reshape(y_true, K.shape(y_pred)[:-1])
    y_true = K.cast(y_true, 'int32')
    # one-hot encode; assumes y_pred is 3-D: (batch, seq_len, num_classes)
    y_true = K.one_hot(y_true, K.shape(y_pred)[2])
    # compute the cross-entropy
    return K.mean(K.categorical_crossentropy(y_true, y_pred))
Example #2
def sparse_categorical_crossentropy(y_true, y_pred):
    """Custom sparse cross-entropy.

    Needed mainly because the sparse_categorical_crossentropy that ships
    with Keras does not support second-order gradients.
    """
    y_true = K.reshape(y_true, K.shape(y_pred)[:-1])
    y_true = K.cast(y_true, 'int32')
    y_true = K.one_hot(y_true, K.shape(y_pred)[-1])
    return K.categorical_crossentropy(y_true, y_pred)
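For context, here is a minimal sketch of wiring such a custom loss into a toy sequence-labeling model at compile time; the model, vocab_size, and layer sizes are illustrative placeholders, not part of the original projects:

import keras.backend as K
from keras.layers import Input, Embedding, Dense
from keras.models import Model

vocab_size = 100                     # hypothetical vocabulary size
tokens_in = Input(shape=(None,), dtype='int32')
x = Embedding(vocab_size, 64)(tokens_in)
probs = Dense(vocab_size, activation='softmax')(x)  # (batch, seq_len, vocab)
model = Model(tokens_in, probs)

# Targets arrive as integer ids with a trailing singleton axis; the custom
# loss reshapes them to (batch, seq_len) and one-hot encodes them internally.
model.compile(loss=sparse_categorical_crossentropy, optimizer='adam')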
Example #3
File: simbert.py  Project: yyht/simbert
 def compute_loss_of_similarity(self, inputs, mask=None):
     _, _, y_pred, _ = inputs
     y_true = self.get_labels_of_similarity(y_pred)  # build the label matrix
     y_pred = K.l2_normalize(y_pred, axis=1)  # L2-normalize sentence vectors
     similarities = K.dot(y_pred, K.transpose(y_pred))  # similarity matrix
     similarities = similarities - K.eye(K.shape(y_pred)[0]) * 1e12  # mask out the diagonal
     similarities = similarities * 30  # scale
     loss = K.categorical_crossentropy(y_true,
                                       similarities,
                                       from_logits=True)
     return loss
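The label builder get_labels_of_similarity is not shown in this excerpt. In SimBERT the batch is arranged so that rows 2k and 2k+1 are a paraphrase pair; here is a minimal sketch of a label builder under that assumption (the actual project code may differ):

def get_labels_of_similarity(self, y_pred):
    # row i's positive is row i+1 when i is even, row i-1 when i is odd
    idxs = K.arange(0, K.shape(y_pred)[0])
    idxs_1 = idxs[None, :]
    idxs_2 = (idxs + 1 - idxs % 2 * 2)[:, None]
    labels = K.equal(idxs_1, idxs_2)
    return K.cast(labels, K.floatx())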
Example #4
 def compute_loss_of_similarity(self, inputs, mask=None):
     _, _, y_pred, _ = inputs
     # build labels, shape (btz, btz): paired sentences are each other's positives
     y_true = self.get_labels_of_similarity(y_pred)
     y_pred = K.l2_normalize(y_pred, axis=1)  # normalize sentence vectors, (?, 768)
     similarities = K.dot(y_pred, K.transpose(y_pred))  # similarity matrix, (btz, btz)
     # mask out the diagonal (each sentence compared with itself):
     # those entries become hugely negative, (btz, btz)
     similarities = similarities - K.eye(K.shape(y_pred)[0]) * 1e12
     similarities = similarities * 30  # scale, (btz, btz)
     # (?,): cross-entropy over the in-batch sentence similarities
     loss = K.categorical_crossentropy(y_true, similarities, from_logits=True)
     return loss
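To see what this loss computes, here is a self-contained run on random vectors, assuming the adjacent-pair labeling convention sketched above; btz and dim are arbitrary demo values:

import numpy as np
import keras.backend as K

btz, dim = 8, 768
vecs = K.constant(np.random.randn(btz, dim))

# labels: row i's positive is its adjacent paired row (assumed convention)
idxs = K.arange(0, btz)
labels = K.cast(K.equal(idxs[None, :], (idxs + 1 - idxs % 2 * 2)[:, None]),
                K.floatx())

vecs = K.l2_normalize(vecs, axis=1)
sims = K.dot(vecs, K.transpose(vecs)) - K.eye(btz) * 1e12  # mask the diagonal
loss = K.categorical_crossentropy(labels, sims * 30, from_logits=True)
print(K.eval(K.mean(loss)))  # a single scalar: the in-batch contrastive loss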
Example #5
    def compute_loss(self, inputs, mask=None):
        q_start_in, q_end_in, q_label_in, ps_category, ps_heads, ps_tails = inputs
        if mask is None:
            mask = 1.0
        else:
            mask = K.cast(mask, K.floatx())
        # category loss, averaged only over unmasked positions
        loss0 = K.sparse_categorical_crossentropy(q_label_in,
                                                  ps_category,
                                                  from_logits=True)
        loss0 = K.sum(loss0 * mask) / K.sum(mask)

        # start-position loss
        loss1 = K.categorical_crossentropy(q_start_in,
                                           ps_heads,
                                           from_logits=True)
        loss1 = K.mean(loss1)

        # enforce end >= start: positions before the gold start get -1e10
        ps_tails = ps_tails - (1 - K.cumsum(q_start_in, axis=1)) * 1e10
        loss2 = K.mean(
            K.categorical_crossentropy(q_end_in, ps_tails, from_logits=True))
        # total loss
        return loss0 + loss1 + loss2
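The subtraction on ps_tails is what enforces end >= start: K.cumsum(q_start_in, axis=1) is 0 before the gold start position and 1 from it onward, so earlier positions receive a -1e10 penalty before the cross-entropy. A toy check of just that masking step (values are illustrative):

import keras.backend as K

q_start = K.constant([[0., 0., 1., 0., 0.]])        # gold start at index 2
ps_tails = K.constant([[0.5, 0.1, 0.2, 0.9, 0.3]])  # hypothetical end logits

masked = ps_tails - (1 - K.cumsum(q_start, axis=1)) * 1e10
print(K.eval(masked))
# positions 0 and 1 are pushed to ~-1e10; indices 2..4 keep their logits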