# Train/validate the sequence model on a 90/10 publication split, persist the
# training history, then prepare the held-out test set.
# NOTE(review): relies on module-level `pub_ids`, `data`, `model`, `words`,
# `subdata_getter`, `DataGenerator`, `NBatchLogger` defined elsewhere in this file.

# 90/10 split of publication ids into train / validation partitions.
split = int(len(pub_ids) * 0.9)
tr_pubs = pub_ids[:split]
val_pubs = pub_ids[split:]

train = subdata_getter(tr_pubs, data)
validation = subdata_getter(val_pubs, data)

tr_generator = DataGenerator(tr_pubs, train)
val_generator = DataGenerator(val_pubs, validation)

# Custom callback that records per-batch train/val logs (see NBatchLogger).
history = NBatchLogger()

# shuffle=False: presumably batch order matters for the temporal data — TODO confirm.
model.fit_generator(generator=tr_generator, shuffle=False,
                    epochs=10, verbose=0, callbacks=[history])

# Capture the validation metrics instead of discarding them.
val_metrics = model.evaluate_generator(generator=val_generator,
                                       use_multiprocessing=True, verbose=0)

# Persist the logged histories; `with` ensures the file handle is closed
# (the original passed an anonymous open() to pickle.dump and leaked it).
history_save = [history.train_log, history.val_log]
with open('history_adam_0.01_d1', 'wb') as f:
    pickle.dump(history_save, f)

# Load the test set and map words unseen during training to the "UNK" token.
data_te = pd.read_csv('df_concat_test.csv', encoding="latin1").fillna(method="ffill")
data_te.loc[~data_te['Word'].isin(words), 'Word'] = "UNK"

te_pub = list(set(data_te["Pub_id"].values))
test = subdata_getter(te_pub, data_te)