Example #1
0
 def call(self, inputs, mask=None, **kwargs):
     """Self-attention pooling over a sequence.

     The sequence attends to itself (queries, keys and values are all the
     same tensor), the attended output is optionally layer-normalized, and
     the result is mean-pooled over the time axis.

     Args:
         inputs: pair ``(sequence, key_masks)``; ``key_masks`` marks the
             valid (non-padded) positions for the weighted sum.
         mask: unused, kept for the Keras ``call`` signature.

     Returns:
         The time-pooled tensor (time axis kept with size 1).
     """
     seq_input, key_masks = inputs
     # Self-attention: the sequence serves as queries, keys and values.
     scores = self.attention([seq_input, seq_input])
     pooled = self.softmax_weight_sum([scores, seq_input, key_masks])
     if self.use_layer_norm:
         pooled = self.layer_norm(pooled)
     # keep_dims=True preserves the time axis as size 1 (TF1-compat name).
     return reduce_mean(pooled, 1, keep_dims=True)
Example #2
0
    def call(self, inputs, mask=None, **kwargs):
        """Attention-pool a history sequence against a user query.

        Args:
            inputs: triple ``(user_query, keys, keys_length)`` where
                ``keys`` is the history sequence and ``keys_length`` gives
                the number of valid positions per sample.
            mask: unused, kept for the Keras ``call`` signature.

        Returns:
            The time-pooled attended tensor (time axis kept with size 1).
        """
        user_query, keys, keys_length = inputs
        # Boolean mask marking valid (non-padded) history positions.
        max_len = keys.get_shape()[1]
        key_masks = tf.sequence_mask(keys_length, max_len)

        # Project the query before scoring it against the history keys.
        projected_query = self.dense(user_query)
        scores = self.attention([projected_query, keys])
        output = self.softmax_weight_sum([scores, keys, key_masks])

        if self.use_res:
            # Residual connection back to the raw keys.
            output += keys
        return reduce_mean(output, 1, keep_dims=True)
Example #3
0
 def call(self, seq_value_len_list, mask=None, **kwargs):
     """Fuse a list of tensors into one by mean, sum or max.

     Each tensor is expanded with a trailing axis, the expanded tensors are
     concatenated along that axis, and the result is reduced according to
     ``self.mode``.

     Args:
         seq_value_len_list: a tensor or a list of tensors to fuse. A
             single tensor (or a one-element list) is returned unchanged.
         mask: unused, kept for the Keras ``call`` signature.

     Returns:
         The fused tensor.

     Raises:
         ValueError: if ``self.mode`` is not "mean", "sum" or "max".
     """
     if not isinstance(seq_value_len_list, list):
         seq_value_len_list = [seq_value_len_list]
     if len(seq_value_len_list) == 1:
         # Nothing to fuse.
         return seq_value_len_list[0]
     expanded = [tf.expand_dims(t, axis=-1) for t in seq_value_len_list]
     stacked = concat_func(expanded)
     if self.mode == "mean":
         return reduce_mean(stacked, axis=-1)
     elif self.mode == "sum":
         return reduce_sum(stacked, axis=-1)
     elif self.mode == "max":
         return reduce_max(stacked, axis=-1)
     # Bug fix: the original fell through with `hist` unbound and raised a
     # confusing NameError for an unknown mode; fail explicitly instead.
     raise ValueError(
         "mode must be one of 'mean', 'sum' or 'max', got %r" % (self.mode,))