Example no. 1
# Example 1: hyper-parameter configuration for an entity-linking model
# (mention/context LSTM encoder + a reinforcement-learning stage).
import os
from collections import OrderedDict

from keras.layers.recurrent import LSTM
from keras.layers import activations, Wrapper
from keras.layers import Input, Embedding, Flatten, Dropout, Lambda, concatenate, Dense

if __name__ == "__main__":
    # Pin the process to GPU 0 only when run as a script.
    os.environ["CUDA_VISIBLE_DEVICES"] = "0"

# NOTE(review): values are stored as *attributes* on the OrderedDict instance,
# not as mapping items — the mapping itself stays empty.  Downstream code reads
# them back as attributes (see config.context_length below), so the pattern is
# kept for compatibility; types.SimpleNamespace would express the intent better.
config = OrderedDict()
config.MAX_WINDOW_SIZE = 10          # context tokens kept around the mention
config.MAX_MENTION_LENGTH = 10       # max tokens in a mention span
config.EMBEDDING_TRAINABLE = False   # freeze the pre-trained embeddings
config.WORD_EMBEDDING_DIM = 300
#config.ENTITY_EMBEDDING_DIM = 300
config.MAX_ENTITY_DESC_LENGTH = 150  # max tokens of an entity description
config.MENTION_CONTEXT_LATENT_SIZE = 50
config.LSTM_SIZE = 300
config.DROPOUT = 0.3
config.ACTIVATION_FUNCTION = 'tanh'
config.batch_size = 1024
config.num_of_neg = 1     # number of negative samples per sentence
config.start_epochs = 0   # epoch index to resume training from
config.epochs = 5         # number of training iterations
config.batch_epochs = 1   # evaluate every this many batch-epochs

# Reinforcement-learning stage configuration.
config.updaterate = 1
config.num_epoch = 5
config.sampletimes = 1
config.negative_sample = 5

# Total sequence length fed to the context encoder.
config.context_length = config.MAX_WINDOW_SIZE + config.MAX_MENTION_LENGTH
Example no. 2
# Example 2: an alternative hyper-parameter configuration plus checkpoint /
# dataset paths for the same entity-linking model family.
import os
from collections import OrderedDict

from keras.layers import activations, Wrapper
from keras.layers import Input, Embedding, Flatten, Dropout, Lambda, concatenate, Dense

if __name__ == "__main__":
    # Select GPU 1 by PCI bus order only when run as a script.
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ["CUDA_VISIBLE_DEVICES"] = "1"

# NOTE(review): as in example 1, settings are stored as attributes on the
# OrderedDict instance (the mapping stays empty); kept for compatibility with
# downstream attribute reads such as config.MAX_WINDOW_SIZE below.
config = OrderedDict()
config.MAX_WINDOW_SIZE = 15          # context tokens kept around the mention
config.MAX_MENTION_LENGTH = 5        # max tokens in a mention span
config.EMBEDDING_TRAINABLE = False   # freeze the pre-trained embeddings
config.WORD_EMBEDDING_DIM = 100
config.ENTITY_EMBEDDING_DIM = 100
config.MAX_ENTITY_DESC_LENGTH = 100  # max tokens of an entity description
config.MENTION_CONTEXT_LATENT_SIZE = 200
config.LSTM_SIZE = 100
config.DROPOUT = 0.3
config.ACTIVATION_FUNCTION = 'tanh'

# Total sequence length fed to the context encoder.
context_length = config.MAX_WINDOW_SIZE + config.MAX_MENTION_LENGTH
batch_size = 256

start_epochs = 0   # epoch index to resume training from
epochs = 35        # number of training iterations
batch_epochs = 5   # evaluate every this many batch-epochs

# `tools` is a project-local module (not visible in this chunk); presumably it
# returns vocabulary indices and pre-trained embedding matrices — TODO confirm.
word_index, entity_indices, word_ebd, entity_ebd = tools.load_matrices()
save_path = './model/origin_rl_model.ckpt'
save_path2 = './model/origin_rl_entity_model.ckpt'

test_dataset_2013 = './data/2013_prepare_filled.txt'