Example #1
import os

from keras.callbacks import ModelCheckpoint

# target = np.array([9999] * len(word_data))  # unused: the loss function multiplies it by 0
if os.path.exists(conf.path_checker):
    print("loading previous checkpoint")
    # model.load_weights(conf.path_checker)  # loading is disabled here

# model.fit(
#     {"word_idx": word_data, "item_pos_idx": item_pos_data, "item_neg_idx": item_neg_data},
#     {"merge_layer": target, "pos_layer": target},
#     batch_size=conf.batch_size, nb_epoch=conf.n_epoch, validation_split=0.1,
#     callbacks=[my_checker_point(item_embed, word_embed, model, conf),
#                # my_value_checker([word_embed_, item_pos_embed_, item_neg_embed_, pos_layer_, neg_layer_, merge_layer_]),
#                ModelCheckpoint(filepath=conf.path_checker, verbose=1, save_best_only=True)])

# Initialize the data generator, then train from generator batches (Keras 1.x API).
dp.generate_init()
model.fit_generator(generator=dp.generate_data(batch_size=conf.batch_size,
                                               is_val=False),
                    nb_worker=1,
                    pickle_safe=False,
                    nb_epoch=conf.n_epoch,
                    samples_per_epoch=conf.sample_per_epoch,
                    validation_data=dp.generate_data(
                        batch_size=conf.batch_size, is_val=True),
                    nb_val_samples=1913599,  # hard-coded number of validation samples
                    verbose=1,
                    callbacks=[
                        my_checker_point(item_embed, word_embed, model, conf),
                        ModelCheckpoint(filepath=conf.path_checker,
                                        verbose=1,
                                        save_best_only=True)
                    ])
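
The source of dp.generate_data is not shown in these excerpts. Keras 1.x fit_generator expects a generator that loops forever and yields one (inputs, targets) tuple per batch; a minimal sketch of that contract, with hypothetical vocabulary sizes and the input/output names taken from the commented-out fit() call above:

import numpy as np

def generate_data(batch_size, is_val=False):
    # Sketch only: yields random index batches forever, as fit_generator
    # requires. Dict keys must match the model's Input and output layer names.
    n_words, n_items = 10000, 5000  # hypothetical vocabulary sizes
    while True:
        inputs = {"word_idx": np.random.randint(0, n_words, (batch_size, 1)),
                  "item_pos_idx": np.random.randint(0, n_items, (batch_size, 1)),
                  "item_neg_idx": np.random.randint(0, n_items, (batch_size, 1))}
        dummy = np.zeros((batch_size, 1))  # targets are ignored by the custom losses
        yield inputs, {"merge_layer": dummy, "pos_layer": dummy}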
Example #2
print(model.summary())

# target = np.array([9999] * len(word_data))  # unused: the loss function multiplies it by 0
if os.path.exists(conf.path_checker):
    print("loading previous checkpoint")
    # model.load_weights(conf.path_checker)  # loading is disabled here

# model.fit(
#     {"word_idx": word_data, "item_pos_idx": item_pos_data, "item_neg_idx": item_neg_data},
#     {"merge_layer": target, "pos_layer": target},
#     batch_size=conf.batch_size, nb_epoch=conf.n_epoch, validation_split=0.1,
#     callbacks=[my_checker_point(item_embed, word_embed, model, conf),
#                # my_value_checker([word_embed_, item_pos_embed_, item_neg_embed_, pos_layer_, neg_layer_, merge_layer_]),
#                ModelCheckpoint(filepath=conf.path_checker, verbose=1, save_best_only=True)])

dp.generate_init()
model.fit_generator(
    generator=dp.generate_data(batch_size=conf.batch_size, is_val=False),
    nb_worker=1,
    pickle_safe=False,
    nb_epoch=conf.n_epoch,
    samples_per_epoch=conf.sample_per_epoch,
    # validation disabled in this variant:
    # validation_data=dp.generate_data(batch_size=conf.batch_size, is_val=True), nb_val_samples=1913599,
    verbose=1,
    callbacks=[
        my_checker_point(item_embed, word_embed, model, conf),
        ModelCheckpoint(filepath=conf.path_checker,
                        verbose=1,
                        save_best_only=True)
    ])
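
my_checker_point is a project-specific callback whose source is not shown. A plausible sketch, assuming it snapshots the two embedding layers after every epoch (the save path and pickle format are hypothetical):

import pickle

from keras.callbacks import Callback

class my_checker_point(Callback):
    # Hypothetical sketch: persist the item/word embedding matrices each epoch.
    def __init__(self, item_embed, word_embed, model, conf):
        super(my_checker_point, self).__init__()
        self.item_embed = item_embed
        self.word_embed = word_embed
        self.conf = conf

    def on_epoch_end(self, epoch, logs=None):
        # get_weights()[0] is the weight matrix of an Embedding layer
        with open("embed_epoch_%d.pkl" % epoch, "wb") as f:  # hypothetical path
            pickle.dump({"item": self.item_embed.get_weights()[0],
                         "word": self.word_embed.get_weights()[0]}, f)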
Example #3
def dummy_loss(y_true, y_pred):  # reconstructed header: Keras losses take (y_true, y_pred)
    # pass-through "loss" for the auxiliary output; zeroed out by loss_weights=[1, 0] below
    loss = y_pred + 0 * y_true
    return loss

model.compile(optimizer=Adam(lr=0.001),
              loss={"merge_layer": ranking_loss, "pos_layer": dummy_loss},
              loss_weights=[1, 0])

print("finish model compiling")
print(model.summary())

# target = np.array([9999] * len(word_data))  # unused: the loss function multiplies it by 0
if os.path.exists(conf.path_checker):
    print("loading previous checkpoint")
    # model.load_weights(conf.path_checker)  # loading is disabled here

# model.fit(
#     {"word_idx": word_data, "item_pos_idx": item_pos_data, "item_neg_idx": item_neg_data},
#     {"merge_layer": target, "pos_layer": target},
#     batch_size=conf.batch_size, nb_epoch=conf.n_epoch, validation_split=0.1,
#     callbacks=[my_checker_point(item_embed, word_embed, model, conf),
#                # my_value_checker([word_embed_, item_pos_embed_, item_neg_embed_, pos_layer_, neg_layer_, merge_layer_]),
#                ModelCheckpoint(filepath=conf.path_checker, verbose=1, save_best_only=True)])

dp.generate_init()
model.fit_generator(generator=dp.generate_data(batch_size=conf.batch_size, is_val=False),
                    nb_worker=1,
                    pickle_safe=False,
                    nb_epoch=conf.n_epoch,
                    samples_per_epoch=conf.sample_per_epoch,
                    validation_data=dp.generate_data(batch_size=conf.batch_size, is_val=True),
                    nb_val_samples=1913599,
                    verbose=1,
                    callbacks=[
                        my_checker_point(item_embed, word_embed, model, conf),
                        ModelCheckpoint(filepath=conf.path_checker, verbose=1, save_best_only=True)
                    ])
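
ranking_loss itself is not shown either. Because the dummy target is multiplied by zero (see the comment on target above), the real loss must be computed from y_pred alone. One plausible sketch, assuming merge_layer outputs the positive-minus-negative score difference (an assumption, not confirmed by these excerpts), is a hinge ranking loss:

from keras import backend as K

def ranking_loss(y_true, y_pred):
    # Assumption: y_pred is pos_score - neg_score from merge_layer.
    # The 0 * y_true term consumes the dummy target without affecting the loss.
    return K.maximum(0.0, 1.0 - y_pred) + 0 * y_true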