Example #1
import os
from collections import OrderedDict

import tools  # project-local helper module providing load_matrices()
from keras import activations
from keras.engine import InputSpec, Model
from keras.layers.recurrent import LSTM
from keras.layers import Input, Embedding, Flatten, Dropout, Lambda, concatenate, Dense, Wrapper

if __name__ == "__main__":
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ["CUDA_VISIBLE_DEVICES"] = "1"

# OrderedDict instances accept attribute assignment, so the dict doubles as a lightweight config namespace
config = OrderedDict()
config.MAX_WINDOW_SIZE = 15            # context window size (tokens)
config.MAX_MENTION_LENGTH = 5          # maximum mention length (tokens)
config.EMBEDDING_TRAINABLE = False     # keep pretrained embeddings frozen
config.WORD_EMBEDDING_DIM = 100        # word embedding dimensionality
config.ENTITY_EMBEDDING_DIM = 100      # entity embedding dimensionality
config.MAX_ENTITY_DESC_LENGTH = 100    # maximum entity description length (tokens)
config.MENTION_CONTEXT_LATENT_SIZE = 200
config.LSTM_SIZE = 100
config.DROPOUT = 0.3
config.ACTIVATION_FUNCTION = 'tanh'

# Total input sequence length: context window plus mention tokens
context_length = config.MAX_WINDOW_SIZE + config.MAX_MENTION_LENGTH
batch_size = 256

start_epochs = 0
epochs = 35
batch_epochs = 5

word_index, entity_indices, word_ebd, entity_ebd = tools.load_matrices()
save_path = './model/origin_rl_model.ckpt'
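
# Illustrative sketch only: one way the hyperparameters above and the loaded
# word embedding matrix could feed a Keras mention-context encoder
# (Input -> frozen Embedding -> LSTM -> Dropout -> Dense). The function name,
# layer wiring, and the assumption that word_ebd has shape
# (vocab_size, WORD_EMBEDDING_DIM) are guesses, not the original model.
def build_context_encoder(word_matrix):
    context_input = Input(shape=(context_length,), dtype='int32', name='context_tokens')
    embedded = Embedding(input_dim=word_matrix.shape[0],
                         output_dim=config.WORD_EMBEDDING_DIM,
                         weights=[word_matrix],
                         trainable=config.EMBEDDING_TRAINABLE)(context_input)
    encoded = LSTM(config.LSTM_SIZE)(embedded)
    encoded = Dropout(config.DROPOUT)(encoded)
    latent = Dense(config.MENTION_CONTEXT_LATENT_SIZE,
                   activation=config.ACTIVATION_FUNCTION)(encoded)
    return Model(inputs=context_input, outputs=latent)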