Example #1
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_T_SeqRec,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)

        # Append two time channels (time since the previous event and time
        # until the prediction point) so Time4LSTMCell can consume them
        # alongside the item embedding.
        item_history_embedding_new = tf.concat([
            self.item_history_embedding,
            tf.expand_dims(self.timelast_history, -1),
            tf.expand_dims(self.timenow_history, -1)
        ], -1)

        # RNN layer
        with tf.name_scope('rnn'):
            rnn_outputs, final_state = dynamic_rnn(
                Time4LSTMCell(HIDDEN_SIZE),
                inputs=item_history_embedding_new,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="time4lstm")
            tf.summary.histogram('LSTM_outputs', rnn_outputs)

        last_inps = tf.concat([self.target_item_embedding, final_state[1]], 1)
        self.fcn_net(last_inps, use_dice=True)
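
Every Time4LSTM model in this listing feeds the cell an item embedding with two extra time channels. A minimal shape sketch of that layout, assuming batch size B, sequence length T, and embedding size D (all values are illustrative, not taken from the original code):

import tensorflow as tf

B, T, D = 32, 50, 36                        # illustrative sizes
item_history_embedding = tf.zeros([B, T, D])
timelast_history = tf.zeros([B, T])         # time since the previous interaction
timenow_history = tf.zeros([B, T])          # time until the prediction point

# The cell input becomes [B, T, D + 2]; Time4LSTMCell is expected to split
# off the trailing two channels as its time features.
inputs = tf.concat([
    item_history_embedding,
    tf.expand_dims(timelast_history, -1),
    tf.expand_dims(timenow_history, -1)
], -1)                                      # shape (32, 50, 38)
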
Example #2
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_SLi_Rec_Fixed,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)

        # Attention layer(1)
        with tf.name_scope('Attention_layer_1'):
            att_outputs1, alphas1 = attention_HAN(
                self.item_history_embedding,
                attention_size=ATTENTION_SIZE,
                return_alphas=True)
            att_fea1 = tf.reduce_sum(att_outputs1, 1)
            tf.summary.histogram('att_fea1', att_fea1)

        # Append the two time channels consumed by Time4LSTMCell (see
        # Example #1).
        item_history_embedding_new = tf.concat([
            self.item_history_embedding,
            tf.expand_dims(self.timelast_history, -1),
            tf.expand_dims(self.timenow_history, -1)
        ], -1)

        # RNN layer
        with tf.name_scope('rnn'):
            rnn_outputs, final_state1 = dynamic_rnn(
                Time4LSTMCell(HIDDEN_SIZE),
                inputs=item_history_embedding_new,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="time4lstm")
            tf.summary.histogram('LSTM_outputs', rnn_outputs)

        # Attention layer(2)
        with tf.name_scope('Attention_layer_2'):
            att_outputs2, alphas2 = attention_FCN(self.target_item_embedding,
                                                  rnn_outputs,
                                                  ATTENTION_SIZE,
                                                  self.mask,
                                                  softmax_stag=1,
                                                  stag='1_1',
                                                  mode='LIST',
                                                  return_alphas=True)
            tf.summary.histogram('alpha_outputs2', alphas2)
            att_fea2 = tf.reduce_sum(att_outputs2, 1)
            tf.summary.histogram('att_fea2', att_fea2)

        # Fixed fusion weight between the two attention features
        with tf.name_scope('User_alpha'):
            user_alpha = 0.2
            user_embed = att_fea1 * user_alpha + att_fea2 * (1.0 - user_alpha)

        last_inps = tf.concat([self.target_item_embedding, user_embed], 1)
        self.fcn_net(last_inps, use_dice=True)
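
`user_alpha` here is pinned at 0.2 (hence the `_Fixed` class name), while Example #5 below learns an input-conditioned gate with an MLP. A middle ground would be a single trainable scalar; a hedged sketch in the same TF1 style (`alpha_logit` is an invented name, not part of the original code):

        with tf.name_scope('User_alpha'):
            # One trainable logit, squashed to (0, 1); cheaper than the MLP
            # gate of Example #5 but still learned from data.
            alpha_logit = tf.get_variable('alpha_logit', shape=[],
                                          initializer=tf.zeros_initializer())
            user_alpha = tf.sigmoid(alpha_logit)
            user_embed = att_fea1 * user_alpha + att_fea2 * (1.0 - user_alpha)
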
Example #3
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_NARM,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)

        # RNN layer(1)
        with tf.name_scope('rnn_1'):
            rnn_outputs1, final_state1 = dynamic_rnn(
                LSTMCell(HIDDEN_SIZE),
                inputs=self.item_history_embedding,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="lstm_1")
            tf.summary.histogram('LSTM_outputs1', rnn_outputs1)

        # RNN layer(2)
        with tf.name_scope('rnn_2'):
            rnn_outputs2, final_state2 = dynamic_rnn(
                LSTMCell(HIDDEN_SIZE),
                inputs=self.item_history_embedding,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="lstm_2")
            tf.summary.histogram('LSTM_outputs2', rnn_outputs2)

        # Attention layer
        with tf.name_scope('Attention_layer'):
            att_outputs, alphas = attention_FCN(final_state1[1],
                                                rnn_outputs2,
                                                ATTENTION_SIZE,
                                                self.mask,
                                                softmax_stag=1,
                                                stag='1_1',
                                                mode='LIST',
                                                return_alphas=True)
            tf.summary.histogram('alpha_outputs', alphas)
            att_fea = tf.reduce_sum(att_outputs, 1)

        last_inps = tf.concat(
            [final_state1[1], att_fea, self.target_item_embedding], 1)
        self.fcn_net(last_inps, use_dice=True)
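
`attention_FCN` is called throughout this listing but never defined in the snippets. A minimal additive-attention sketch with the same call shape (query, keys, attention size, mask, returning per-step weighted keys plus weights, matching the 'LIST' mode that reduce_sum then pools); the name and all internals are assumptions, not the library's implementation:

def additive_attention(query, keys, attention_size, mask):
    # query: [B, H], keys: [B, T, H], mask: [B, T] with 1.0 on valid steps.
    T = tf.shape(keys)[1]
    q = tf.tile(tf.expand_dims(query, 1), tf.stack([1, T, 1]))   # [B, T, H]
    hidden = tf.layers.dense(tf.concat([q, keys], -1),
                             attention_size, activation=tf.nn.tanh)
    scores = tf.squeeze(tf.layers.dense(hidden, 1), -1)          # [B, T]
    scores += (1.0 - tf.cast(mask, tf.float32)) * -1e9           # mask padding
    alphas = tf.nn.softmax(scores)                               # [B, T]
    return keys * tf.expand_dims(alphas, -1), alphas             # 'LIST' mode
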
Example #4
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_DIEN,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)

        # RNN layer(1)
        with tf.name_scope('rnn_1'):
            rnn_outputs, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE),
                                         inputs=self.item_history_embedding,
                                         sequence_length=self.sequence_length,
                                         dtype=tf.float32,
                                         scope="gru_1")
            tf.summary.histogram('GRU_outputs', rnn_outputs)

        # Attention layer
        with tf.name_scope('Attention_layer'):
            att_outputs, alphas = attention_FCN(self.target_item_embedding,
                                                rnn_outputs,
                                                ATTENTION_SIZE,
                                                self.mask,
                                                softmax_stag=1,
                                                stag='1_1',
                                                mode='LIST',
                                                return_alphas=True)
            tf.summary.histogram('alpha_outputs', alphas)

        # RNN layer(2)
        with tf.name_scope('rnn_2'):
            rnn_outputs2, final_state2 = dynamic_rnn(
                VecAttGRUCell(HIDDEN_SIZE),
                inputs=rnn_outputs,
                att_scores=tf.expand_dims(alphas, -1),
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="gru_2")
            tf.summary.histogram('GRU2_Final_State', final_state2)

        last_inps = tf.concat([self.target_item_embedding, final_state2], 1)
        self.fcn_net(last_inps, use_dice=True)
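
`VecAttGRUCell` is likewise not shown here. In DIEN the attention score rescales the GRU update gate (the AUGRU variant), so low-attention steps barely move the hidden state. A one-step sketch of that update rule, assuming the update gate and candidate state have already been computed:

def augru_update(h_prev, h_tilde, u, att_score):
    # h_prev, h_tilde, u: [B, H]; att_score: [B, 1] (the alphas after
    # expand_dims, exactly as passed in the call above).
    u_hat = att_score * u                       # attention-scaled update gate
    return (1.0 - u_hat) * h_prev + u_hat * h_tilde
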
Example #5
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_LSTMPP,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)

        # Attention layer
        with tf.name_scope('Attention_layer'):
            att_outputs, alphas = attention_HAN(self.item_history_embedding,
                                                attention_size=ATTENTION_SIZE,
                                                return_alphas=True)
            att_fea = tf.reduce_sum(att_outputs, 1)
            tf.summary.histogram('att_fea', att_fea)

        # RNN layer
        with tf.name_scope('rnn'):
            rnn_outputs, final_state = dynamic_rnn(
                LSTMCell(HIDDEN_SIZE),
                inputs=self.item_history_embedding,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="lstm")
            tf.summary.histogram('LSTM_outputs', rnn_outputs)

        # Learned fusion weight: an MLP maps the candidate item, the attention
        # feature, the LSTM state, and the latest time gap to a gate in (0, 1)
        with tf.name_scope('User_alpha'):
            concat_all = tf.concat([
                self.target_item_embedding, att_fea, final_state[1],
                tf.expand_dims(self.timenow_history[:, -1], -1)
            ], 1)
            concat_att1 = tf.layers.dense(concat_all,
                                          80,
                                          activation=tf.nn.sigmoid,
                                          name='concat_att1')
            concat_att2 = tf.layers.dense(concat_att1,
                                          40,
                                          activation=tf.nn.sigmoid,
                                          name='concat_att2')
            user_alpha = tf.layers.dense(concat_att2,
                                         1,
                                         activation=tf.nn.sigmoid,
                                         name='concat_att3')
            user_embed = att_fea * user_alpha + final_state[1] * (1.0 - user_alpha)

        last_inps = tf.concat([self.target_item_embedding, user_embed], 1)
        self.fcn_net(last_inps, use_dice=True)
Example #6
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_LSTM,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)

        # RNN layer
        with tf.name_scope('rnn'):
            rnn_outputs, final_state = dynamic_rnn(
                LSTMCell(HIDDEN_SIZE),
                inputs=self.item_history_embedding,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="lstm")
            tf.summary.histogram('LSTM_outputs', rnn_outputs)

        last_inps = tf.concat([self.target_item_embedding, final_state[1]], 1)
        self.fcn_net(last_inps, use_dice=False)
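
This plain LSTM baseline is the only model in the listing that passes `use_dice=False` to `fcn_net`. The flag presumably switches the hidden activations to Dice, the data-adaptive activation from the DIN paper; a minimal TF1 sketch (the real helper is not shown, so the name and details are assumptions):

def dice(x, name=''):
    # Dice (DIN): a sigmoid gate on batch-normalized inputs blends the
    # identity with a learned leaky slope alpha.
    alpha = tf.get_variable('dice_alpha' + name, x.get_shape()[-1],
                            initializer=tf.constant_initializer(0.0))
    x_norm = tf.layers.batch_normalization(x, center=False, scale=False,
                                           name='dice_bn' + name)
    p = tf.sigmoid(x_norm)
    return p * x + (1.0 - p) * alpha * x
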
Example #7
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_TC_SeqRec,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)

        # Append the two time channels consumed by Time4LSTMCell.
        item_history_embedding_new = tf.concat([
            self.item_history_embedding,
            tf.expand_dims(self.timelast_history, -1),
            tf.expand_dims(self.timenow_history, -1)
        ], -1)

        # RNN layer
        with tf.name_scope('rnn'):
            rnn_outputs, final_state = dynamic_rnn(
                Time4LSTMCell(HIDDEN_SIZE),
                inputs=item_history_embedding_new,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="time4lstm")
            tf.summary.histogram('LSTM_outputs', rnn_outputs)

        # Attention layer
        with tf.name_scope('Attention_layer'):
            att_outputs, alphas = attention_FCN(self.target_item_embedding,
                                                rnn_outputs,
                                                ATTENTION_SIZE,
                                                self.mask,
                                                softmax_stag=1,
                                                stag='1_1',
                                                mode='LIST',
                                                return_alphas=True)
            tf.summary.histogram('alpha_outputs', alphas)
            att_fea = tf.reduce_sum(att_outputs, 1)

        last_inps = tf.concat([self.target_item_embedding, att_fea], 1)
        self.fcn_net(last_inps, use_dice=True)
Example #8
    def __init__(self, user_number, item_number, cate_number, EMBEDDING_DIM,
                 HIDDEN_SIZE, ATTENTION_SIZE):
        super(Model_CARNN,
              self).__init__(user_number, item_number, cate_number,
                             EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
        # Append the time-interval feature expected by CARNNCell.
        self.item_history_embedding = tf.concat([
            self.item_history_embedding,
            tf.expand_dims(self.timeinterval_history, -1)
        ], -1)

        # RNN layer
        with tf.name_scope('rnn'):
            rnn_outputs, final_state = dynamic_rnn(
                CARNNCell(HIDDEN_SIZE),
                inputs=self.item_history_embedding,
                sequence_length=self.sequence_length,
                dtype=tf.float32,
                scope="carnn")
            tf.summary.histogram('CARNN_outputs', rnn_outputs)

        last_inps = tf.concat([final_state, self.target_item_embedding], 1)
        self.fcn_net(last_inps, use_dice=True)
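
All eight constructors share the same signature, so swapping architectures is a one-line change in a driver script. A hypothetical instantiation (every value below is illustrative, not taken from the original code):

EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE = 18, 36, 36    # illustrative
model = Model_DIEN(user_number=10000, item_number=50000, cate_number=1000,
                   EMBEDDING_DIM=EMBEDDING_DIM, HIDDEN_SIZE=HIDDEN_SIZE,
                   ATTENTION_SIZE=ATTENTION_SIZE)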