Example #1
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.losses import categorical_crossentropy, cosine_similarity


def custom_loss(y_true, y_pred):
    # num_classes and emb_size are captured from the enclosing scope.
    # Both tensors pack two one-hot label blocks first; y_pred also
    # carries the two embeddings after them.
    y_true_label_1 = y_true[:, :num_classes]
    y_true_label_2 = y_true[:, num_classes:num_classes * 2]
    y_pred_label_1 = y_pred[:, :num_classes]
    y_pred_label_2 = y_pred[:, num_classes:num_classes * 2]

    y_pred_embedding_1 = y_pred[:, num_classes * 2:num_classes * 2 + emb_size]
    y_pred_embedding_2 = y_pred[:, num_classes * 2 + emb_size:]

    # Per-head classification losses; cosine_similarity returns the
    # *negative* cosine similarity of the two embeddings.
    class_loss_1 = categorical_crossentropy(y_true_label_1, y_pred_label_1)
    class_loss_2 = categorical_crossentropy(y_true_label_2, y_pred_label_2)
    embedding_loss = cosine_similarity(y_pred_embedding_1, y_pred_embedding_2)

    are_labels_equal = K.all(K.equal(y_true_label_1, y_true_label_2), axis=1)

    # Contrastive sign: +1 pulls same-label embeddings together,
    # -1 pushes different-label embeddings apart.
    a = tf.where(are_labels_equal,
                 tf.fill([tf.shape(are_labels_equal)[0]], 1.0),
                 tf.fill([tf.shape(are_labels_equal)[0]], -1.0))

    result = class_loss_1 + class_loss_2 + tf.math.multiply(a, embedding_loss)

    return tf.math.reduce_mean(result)
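
A minimal sketch of how such a loss could be wired up, assuming a single-output model that concatenates the two label heads and the two embeddings in the order the slicing above expects; the architecture and sizes here are purely illustrative:

    import tensorflow as tf

    num_classes, emb_size = 10, 128  # hypothetical sizes

    inp = tf.keras.Input(shape=(64,))
    labels_1 = tf.keras.layers.Dense(num_classes, activation='softmax')(inp)
    labels_2 = tf.keras.layers.Dense(num_classes, activation='softmax')(inp)
    emb_1 = tf.keras.layers.Dense(emb_size)(inp)
    emb_2 = tf.keras.layers.Dense(emb_size)(inp)
    out = tf.keras.layers.Concatenate()([labels_1, labels_2, emb_1, emb_2])

    model = tf.keras.Model(inp, out)
    model.compile(optimizer='adam', loss=custom_loss)
    # y_true would be the two one-hot labels concatenated; depending on the
    # Keras version it may need zero-padding to the full output width.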
Example #2
def __getitem__(self, index):
    # Batch accessor of a data generator (presumably a
    # tf.keras.utils.Sequence); re-serves the cached batch when the
    # same index is requested twice in a row.
    if index == self.last_index:
        return [self.images1, self.images2], self.rsas
    batch_indexes = self.indexes[index * self.batch_size:
                                 (index + 1) * self.batch_size]
    for i, item in enumerate(batch_indexes):
        self.images1[i], self.neuro1[i] = self.generator[self.pairs[item][0]]
        self.images2[i], self.neuro2[i] = self.generator[self.pairs[item][1]]
    self.rsas = cosine_similarity(self.neuro1, self.neuro2)
    self.last_index = index
    return [self.images1, self.images2], self.rsas
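
For context, a hedged sketch of the surrounding generator class this method would slot into; the class name, constructor, and buffer shapes are assumptions, while the attribute names follow the snippet:

    import numpy as np
    from tensorflow.keras.utils import Sequence

    class PairSequence(Sequence):  # hypothetical name
        def __init__(self, generator, pairs, batch_size):
            self.generator = generator  # indexable source of (image, neuro) items
            self.pairs = pairs          # list of (index_a, index_b) tuples
            self.batch_size = batch_size
            self.indexes = np.arange(len(pairs))
            self.last_index = -1
            self.rsas = None
            # preallocated batch buffers; shapes are illustrative
            self.images1 = np.zeros((batch_size, 64, 64, 3), dtype=np.float32)
            self.images2 = np.zeros((batch_size, 64, 64, 3), dtype=np.float32)
            self.neuro1 = np.zeros((batch_size, 100), dtype=np.float32)
            self.neuro2 = np.zeros((batch_size, 100), dtype=np.float32)

        def __len__(self):
            return len(self.pairs) // self.batch_size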
Example #3
def consistency_loss(self, y, x, mask):
    '''
    y (b, h, w, c) : feature map of the propagation encoder
    x (b, h, w, c) : feature map of the momentum encoder
    mask (b, h*w, h*w) : mask applied to x
    '''
    _, h, w, c = x.shape
    y = tf.reshape(y, (-1, h * w, c))
    x = tf.reshape(x, (-1, h * w, c))
    # pairwise similarity between every y position and every (detached)
    # x position; negated because cosine_similarity returns negative
    # similarity
    cos = -cosine_similarity(y[:, :, None, :],
                             tf.stop_gradient(x)[:, None, :, :],
                             axis=-1)  # (b, h*w, h*w)
    cos *= mask  # keep only the masked position pairs
    cos = tf.reduce_sum(cos, axis=(1, 2))  # (b,)
    mask_cnt = tf.math.count_nonzero(mask, axis=(1, 2),
                                     dtype=tf.float32)  # (b,)
    cos = tf.math.divide_no_nan(cos, mask_cnt)  # per-sample mean, (b,)
    cos = tf.reduce_mean(cos)  # scalar
    return cos
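
A quick smoke test on random tensors; shapes are illustrative, and cosine_similarity is assumed to be tf.keras.losses.cosine_similarity. Since the method never reads self, it can be exercised directly:

    import tensorflow as tf
    from tensorflow.keras.losses import cosine_similarity

    b, h, w, c = 2, 4, 4, 8
    y = tf.random.normal((b, h, w, c))  # propagation-encoder features
    x = tf.random.normal((b, h, w, c))  # momentum-encoder features
    mask = tf.cast(tf.random.uniform((b, h * w, h * w)) > 0.5, tf.float32)

    loss = consistency_loss(None, y, x, mask)
    print(loss.shape)  # () -- a scalar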
Example #4
def call(self, inputs, training=None):
    # K is presumably tensorflow.keras.backend; cosine_similarity is
    # presumably tf.keras.losses.cosine_similarity.
    shape = K.int_shape(inputs)
    _, h, w, c = shape

    # run the input through the configured transforms; entries tagged
    # 'bn' are batch-norm layers that need the training flag
    t = inputs
    for transform in self.transforms:
        if transform[0] == 'bn':
            t = tf.nn.relu(transform[1](t, training))
        else:
            t = transform[1](t)

    t = tf.reshape(t, (-1, h * w, c))
    x = tf.reshape(inputs, (-1, h * w, c))

    # pairwise cosine similarity between all spatial positions (negated
    # because cosine_similarity returns negative similarity), clipped
    # to non-negative values and sharpened by gamma
    sim = -cosine_similarity(x[:, :, None, :], x[:, None, :, :],
                             axis=-1)  # (-1, h*w, h*w)
    sim = tf.nn.relu(sim)
    sim = tf.pow(sim, self.gamma)

    # aggregate the transformed features, weighted by spatial similarity
    y = tf.matmul(sim, t)  # (-1, h*w, c)
    y = tf.reshape(y, (-1, h, w, c))
    return y
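
A hedged sketch of the enclosing layer this call presumably belongs to; the class name, transform list, and default gamma are assumptions. Note the 1x1 convolution keeps the channel count, which the final reshape requires:

    import tensorflow as tf

    class SelfSimilarityPropagation(tf.keras.layers.Layer):  # hypothetical name
        def __init__(self, channels, gamma=2.0, **kwargs):
            super().__init__(**kwargs)
            self.gamma = gamma
            # (tag, layer) pairs; 'bn' entries receive the training flag
            self.transforms = [
                ('conv', tf.keras.layers.Conv2D(channels, 1, use_bias=False)),
                ('bn', tf.keras.layers.BatchNormalization()),
            ]

        # the call() above would be attached here; usage would then be:
        # y = SelfSimilarityPropagation(channels=64)(tf.random.normal((2, 8, 8, 64)))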
Example #5
def __init__(self, **kwargs):
    # Wraps a two-tensor cosine similarity as a layer; presumably
    # defined on a subclass of tf.keras.layers.Lambda.
    super().__init__(lambda x: cosine_similarity(x[0], x[1]),
                     output_shape=lambda x: x[0],
                     **kwargs)
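
Filled out as a complete class under that assumption, a minimal sketch (the class name is hypothetical):

    import tensorflow as tf
    from tensorflow.keras.losses import cosine_similarity

    class CosineSimilarityLayer(tf.keras.layers.Lambda):  # hypothetical name
        def __init__(self, **kwargs):
            super().__init__(lambda x: cosine_similarity(x[0], x[1]),
                             output_shape=lambda x: x[0],
                             **kwargs)

    # usage: consumes a pair of tensors and emits their (negative)
    # cosine similarity along the last axis
    a = tf.keras.Input(shape=(128,))
    b = tf.keras.Input(shape=(128,))
    sim = CosineSimilarityLayer()([a, b])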
Example #6
    # average all token embeddings into a single sentence vector
    embeddings = tf.math.reduce_mean(embeddings, axis=(0, 1))

    with tf.compat.v1.Session() as sess:
        sess.run(tf.compat.v1.global_variables_initializer())
        sess.run(tf.compat.v1.tables_initializer())
        return sess.run(embeddings)
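
The snippet starts mid-function; the missing head of elmo_vectors presumably builds the token embeddings with tensorflow_hub, roughly along these lines (module URL and signature are assumptions):

    import tensorflow as tf
    import tensorflow_hub as hub

    elmo = hub.Module("https://tfhub.dev/google/elmo/2", trainable=False)

    def elmo_vectors(review):
        # one 1024-d vector per token: shape (1, max_tokens, 1024)
        embeddings = elmo([review], signature="default", as_dict=True)["elmo"]
        # ...followed by the averaging and session code shown above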


i = 0
review_vector_frame = df[['_id', 'name', 'category_id', 'category', 'review']]
del df
# review_vector_frame = review_vector_frame[0:10]


# Parallel processing: embed every review across all CPU cores
p = mp.Pool(mp.cpu_count())
review_vector_frame['review_vector'] = p.map(elmo_vectors,
                                             review_vector_frame['review'])
p.close()

# # Apply method (single-process alternative)
# review_vector_frame['review_vector'] = review_vector_frame['review'].apply(elmo_vectors)


# Cosine similarity of every review vector against each category vector
for key, value in feature_vectors.items():
    # repeat the category vector once per review row
    v = [ele for ele in value.numpy() for _ in range(review_vector_frame.shape[0])]
    similarity = np.abs(np.array(cosine_similarity(
        review_vector_frame['review_vector'].tolist(), v))) * 100
    review_vector_frame[key] = similarity
review_vector_frame = review_vector_frame.drop(['category', 'review_vector'], axis=1)
review_vector_frame.to_csv("results/CRM_categorization.csv", index=False)
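
Both this example and the next iterate over a feature_vectors dict that is never shown; purely as an assumption, it plausibly maps each category name to a precomputed ELMo embedding:

    # hypothetical construction of feature_vectors (not in the original)
    feature_vectors = {
        category: tf.convert_to_tensor(elmo_vectors(category))
        for category in review_vector_frame['category'].unique()
    }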
Example #7
    # tail of elmo_vectors: log progress, then evaluate the embeddings
    global i
    i += 1
    print("Row count =", i, "Time =", round(time.time(), 2))
    print("=================================================")
    with tf.compat.v1.Session() as sess:
        sess.run(tf.compat.v1.global_variables_initializer())
        sess.run(tf.compat.v1.tables_initializer())
        return sess.run(embeddings)


i = 0
review_vector_frame = df[['_id', 'name', 'category_id', 'category', 'review']]
del df
review_vector_frame = review_vector_frame[0:10]
review_vector_frame['review_vector'] = review_vector_frame['review'].apply(
    elmo_vectors)

for key, value in feature_vectors.items():
    # repeat the category vector once per review row
    v = [ele for ele in value.numpy() for _ in range(review_vector_frame.shape[0])]
    similarity = np.abs(np.array(cosine_similarity(
        review_vector_frame['review_vector'].tolist(), v))) * 100
    review_vector_frame[key] = similarity
review_vector_frame = review_vector_frame.drop(['category', 'review_vector'], axis=1)
review_vector_frame.to_csv("results/CRM_categorization.csv", index=False)