Example #1
0
def main():
    """Run adversarial (GAN-style) training for learning-to-rank.

    Alternates between training the discriminator on negatives sampled by
    the generator and training the generator with policy-gradient updates
    weighted by the discriminator's reward. Tracks and saves the model with
    the best P@5 (ties broken by NDCG@5) to ``GAN_MODEL_BEST_FILE``.

    Relies on module-level config (FEATURE_SIZE, BATCH_SIZE, file paths,
    learning rates) and helpers (ut, generate_for_d, precision_at_k, ...).
    """
    print("load initial model ...")

    # Binary mode is the portable mode for pickle data, and the context
    # manager closes the handle (the original leaked an open file object).
    with open(DIS_MODEL_FILE_NN, 'rb') as model_file:
        param_nn = cPickle.load(model_file)
    assert param_nn is not None

    discriminator = DIS(FEATURE_SIZE,
                        HIDDEN_SIZE,
                        D_WEIGHT_DECAY,
                        D_LEARNING_RATE,
                        loss='log',
                        param=param_nn)
    # NOTE(review): the generator is seeded with the same pretrained
    # parameters as the discriminator — confirm this is intentional.
    generator = GEN(FEATURE_SIZE,
                    HIDDEN_SIZE,
                    G_WEIGHT_DECAY,
                    G_LEARNING_RATE,
                    param=param_nn)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # claim GPU memory on demand
    sess = tf.Session(config=config)
    sess.run(tf.initialize_all_variables())

    print('start adversarial training')

    p_best_val = 0.0
    ndcg_best_val = 0.0

    for epoch in range(30):
        if epoch > 0:
            # G generates negatives for D, then D is trained on them.
            print('Training D ...')
            generate_for_d(sess, generator, DIS_TRAIN_FILE)
            train_size = ut.file_len(DIS_TRAIN_FILE)

            for d_epoch in range(30):
                # Sweep the training file in BATCH_SIZE chunks;
                # offsets are 1-indexed for ut.get_batch_data.
                index = 1
                while index <= train_size:
                    # The final batch may be shorter than BATCH_SIZE.
                    batch = min(BATCH_SIZE, train_size - index + 1)
                    input_pos, input_neg = ut.get_batch_data(
                        DIS_TRAIN_FILE, index, batch)
                    index += BATCH_SIZE

                    _ = sess.run(discriminator.d_updates,
                                 feed_dict={
                                     discriminator.pos_data: input_pos,
                                     discriminator.neg_data: input_neg
                                 })

                p_5 = precision_at_k(sess,
                                     discriminator,
                                     query_pos_test,
                                     query_pos_train,
                                     query_url_feature,
                                     k=5)
                ndcg_5 = ndcg_at_k(sess,
                                   discriminator,
                                   query_pos_test,
                                   query_pos_train,
                                   query_url_feature,
                                   k=5)

                # Save on a new best P@5; on a P@5 tie, save only if
                # NDCG@5 improves.
                if p_5 > p_best_val:
                    p_best_val = p_5
                    ndcg_best_val = ndcg_5
                    discriminator.save_model(sess, GAN_MODEL_BEST_FILE)
                    print("Best: ", "dis p@5 ", p_5, "dis ndcg@5 ", ndcg_5)
                elif p_5 == p_best_val and ndcg_5 > ndcg_best_val:
                    ndcg_best_val = ndcg_5
                    discriminator.save_model(sess, GAN_MODEL_BEST_FILE)
                    print("Best: ", "dis p@5 ", p_5, "dis ndcg@5 ", ndcg_5)

        # Train G: sample negatives from G's softmax policy and apply a
        # policy-gradient update weighted by D's reward.
        print('Training G ...')
        for g_epoch in range(50):
            for query in query_pos_train:
                pos_list = query_pos_train[query]
                # Candidates are ALL urls for the query, positives included.
                candidate_list = list(query_url_feature[query].keys())
                if not candidate_list:
                    continue

                candidate_list_feature = np.asarray(
                    [query_url_feature[query][url] for url in candidate_list])
                candidate_list_score = sess.run(
                    generator.pred_score,
                    {generator.pred_data: candidate_list_feature})

                # Softmax over every candidate's score.
                exp_rating = np.exp(candidate_list_score)
                prob = exp_rating / np.sum(exp_rating)

                # Draw one negative per positive according to G's policy.
                neg_index = np.random.choice(np.arange(len(candidate_list)),
                                             size=[len(pos_list)],
                                             p=prob)
                neg_list = np.array(candidate_list)[neg_index]

                pos_list_feature = [
                    query_url_feature[query][url] for url in pos_list
                ]
                neg_list_feature = [
                    query_url_feature[query][url] for url in neg_list
                ]
                # Every negative sample gets a reward from D.
                neg_reward = sess.run(discriminator.reward,
                                      feed_dict={
                                          discriminator.pos_data:
                                          pos_list_feature,
                                          discriminator.neg_data:
                                          neg_list_feature
                                      })

                # Method 1: softmax before gather
                _ = sess.run(generator.gan_updates,
                             feed_dict={
                                 generator.pred_data: candidate_list_feature,
                                 generator.sample_index: neg_index,
                                 generator.reward: neg_reward
                             })

    print('Best p@5: ', p_best_val, 'Best ndcg@5: ', ndcg_best_val)
Example #2
0
def main():
    """Train a log-loss ranking discriminator with dynamic negative
    sampling (DNS).

    Tracks the best model by P@5 (ties broken by NDCG@5), saves it to
    ``DNS_MODEL_BEST_FILE``, then reloads that checkpoint and reports
    P@k / NDCG@k for k in {1, 3, 5, 10} plus MAP and MRR on the test set.

    Relies on module-level config (FEATURE_SIZE, BATCH_SIZE, file paths)
    and helpers (ut, generate_dns, precision_at_k, ndcg_at_k, MAP, MRR).
    """
    discriminator = DIS(FEATURE_SIZE,
                        HIDDEN_SIZE,
                        WEIGHT_DECAY,
                        D_LEARNING_RATE,
                        loss='log',
                        param=None)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # claim GPU memory on demand
    sess = tf.Session(config=config)
    sess.run(tf.initialize_all_variables())

    print('start dynamic negative sampling with log ranking discriminator')
    p_best_val = 0.0
    ndcg_best_val = 0.0

    for epoch in range(200):
        # Regenerate hard negatives with the current model, then sweep the
        # training file once; offsets are 1-indexed for ut.get_batch_data.
        generate_dns(sess, discriminator, DIS_TRAIN_FILE)
        train_size = ut.file_len(DIS_TRAIN_FILE)

        index = 1
        while index <= train_size:
            # The final batch may be shorter than BATCH_SIZE.
            batch = min(BATCH_SIZE, train_size - index + 1)
            input_pos, input_neg = ut.get_batch_data(DIS_TRAIN_FILE, index,
                                                     batch)
            index += BATCH_SIZE

            _ = sess.run(discriminator.d_updates,
                         feed_dict={
                             discriminator.pos_data: np.asarray(input_pos),
                             discriminator.neg_data: np.asarray(input_neg)
                         })

        p_5 = precision_at_k(sess,
                             discriminator,
                             query_pos_test,
                             query_pos_train,
                             query_url_feature,
                             k=5)
        ndcg_5 = ndcg_at_k(sess,
                           discriminator,
                           query_pos_test,
                           query_pos_train,
                           query_url_feature,
                           k=5)

        if p_5 > p_best_val:
            p_best_val = p_5
            # Fix: keep NDCG in sync with the saved best model (the
            # original left ndcg_best_val stale here, so later P@5 ties
            # were broken against an outdated NDCG value).
            ndcg_best_val = ndcg_5
            discriminator.save_model(sess, DNS_MODEL_BEST_FILE)
            print("Best: ", " p@5 ", p_5, "ndcg@5 ", ndcg_5)
        elif p_5 == p_best_val and ndcg_5 > ndcg_best_val:
            ndcg_best_val = ndcg_5
            discriminator.save_model(sess, DNS_MODEL_BEST_FILE)
            print("Best: ", " p@5 ", p_5, "ndcg@5 ", ndcg_5)

    sess.close()

    # Reload the best checkpoint; binary mode + context manager replaces
    # the original's leaked text-mode file handle.
    with open(DNS_MODEL_BEST_FILE, 'rb') as model_file:
        param_best = cPickle.load(model_file)
    assert param_best is not None
    discriminator_best = DIS(FEATURE_SIZE,
                             HIDDEN_SIZE,
                             WEIGHT_DECAY,
                             D_LEARNING_RATE,
                             loss='log',
                             param=param_best)

    sess = tf.Session(config=config)
    sess.run(tf.initialize_all_variables())

    # Final evaluation of the reloaded best model.
    p_1_best = precision_at_k(sess,
                              discriminator_best,
                              query_pos_test,
                              query_pos_train,
                              query_url_feature,
                              k=1)
    p_3_best = precision_at_k(sess,
                              discriminator_best,
                              query_pos_test,
                              query_pos_train,
                              query_url_feature,
                              k=3)
    p_5_best = precision_at_k(sess,
                              discriminator_best,
                              query_pos_test,
                              query_pos_train,
                              query_url_feature,
                              k=5)
    p_10_best = precision_at_k(sess,
                               discriminator_best,
                               query_pos_test,
                               query_pos_train,
                               query_url_feature,
                               k=10)

    ndcg_1_best = ndcg_at_k(sess,
                            discriminator_best,
                            query_pos_test,
                            query_pos_train,
                            query_url_feature,
                            k=1)
    ndcg_3_best = ndcg_at_k(sess,
                            discriminator_best,
                            query_pos_test,
                            query_pos_train,
                            query_url_feature,
                            k=3)
    ndcg_5_best = ndcg_at_k(sess,
                            discriminator_best,
                            query_pos_test,
                            query_pos_train,
                            query_url_feature,
                            k=5)
    ndcg_10_best = ndcg_at_k(sess,
                             discriminator_best,
                             query_pos_test,
                             query_pos_train,
                             query_url_feature,
                             k=10)

    map_best = MAP(sess, discriminator_best, query_pos_test, query_pos_train,
                   query_url_feature)
    mrr_best = MRR(sess, discriminator_best, query_pos_test, query_pos_train,
                   query_url_feature)

    print("Best ", "p@1 ", p_1_best, "p@3 ", p_3_best, "p@5 ", p_5_best,
          "p@10 ", p_10_best)
    # Fix: the last label printed "p@10" for an NDCG value.
    print("Best ", "ndcg@1 ", ndcg_1_best, "ndcg@3 ", ndcg_3_best, "ndcg@5 ",
          ndcg_5_best, "ndcg@10 ", ndcg_10_best)
    print("Best MAP ", map_best)
    print("Best MRR ", mrr_best)
    sess.close()  # release the evaluation session
Example #3
0
# Evaluation script: load the best saved pairwise-GAN discriminator and
# score it on the MQ2008-semi test split.
D_WEIGHT_DECAY = 0.001
D_LEARNING_RATE = 0.0001

workdir = 'MQ2008-semi'
GAN_PAIRWISE_MODEL_BEST_FILE = workdir + '/gan/gan_best_nn.model'

# Load per-query url features and the query -> positive-url maps for the
# train/test splits.
query_url_feature =\
    ut.load_all_query_url_feature(workdir + '/Large_norm.txt', FEATURE_SIZE)
query_pos_train = ut.get_query_pos(workdir + '/train.txt')
query_pos_test = ut.get_query_pos(workdir + '/test.txt')

# Binary mode is the portable mode for pickle data, and the context manager
# closes the handle (the original leaked an open file object).
with open(GAN_PAIRWISE_MODEL_BEST_FILE, 'rb') as model_file:
    param_best = cPickle.load(model_file)
assert param_best is not None
discriminator_best = DIS(FEATURE_SIZE,
                         HIDDEN_SIZE,
                         D_WEIGHT_DECAY,
                         D_LEARNING_RATE,
                         loss='log',
                         param=param_best)

config = tf.ConfigProto()
config.gpu_options.allow_growth = True  # claim GPU memory on demand
sess = tf.Session(config=config)
sess.run(tf.initialize_all_variables())

p_1_best = precision_at_k(sess,
                          discriminator_best,
                          query_pos_test,
                          query_pos_train,
                          query_url_feature,
                          k=1)
p_3_best = precision_at_k(sess,