# --- BaseLSTM experiment setup: embeddings, Config, data, pretrained vectors ---
emb = args.wordemb.lower()
print('Word Embedding: ', emb)
# Resolve pretrained word-embedding path; any name other than 'glove' means
# "train embeddings from scratch" (emb_file stays None).
emb_file = '../../../../data/embedding/glove.6B.100d.txt' if emb == 'glove' else None

char_emb_file = args.charemb.lower()
print('Char Embedding: ', char_emb_file)

name = 'BaseLSTM'  # catnlp

# Model / training hyper-parameters for the BaseLSTM run.
config = Config()
config.layers = 2
config.optim = 'Adam'
config.char_features = 'CNN'
config.lr = 0.015
config.hidden_dim = 200
config.bid_flag = True
config.number_normalized = True

# Build vocabularies/alphabets over every split before instance generation.
data_initialization(config, train_file, dev_file, test_file)
config.gpu = gpu
config.word_features = name
print('Word features: ', config.word_features)

# Convert raw files to model-ready instances, one call per split.
for split_file, split_name in ((train_file, 'train'),
                               (dev_file, 'dev'),
                               (test_file, 'test')):
    config.generate_instance(split_file, split_name)

if emb_file:
    print('load word emb file...norm: ', config.norm_word_emb)
    # NOTE(review): 'pretain' spelling matches the Config API as invoked
    # throughout this project — do not "fix" without changing Config too.
    config.build_word_pretain_emb(emb_file)
if char_emb_file != 'none':
    print('load char emb file...norm: ', config.norm_char_emb)
    config.build_char_pretrain_emb(char_emb_file)
# --- LSTM experiment setup: embeddings, Config, data, pretrained vectors ---
# Map embedding name (from the earlier `emb = args.wordemb.lower()`) to a
# pretrained vector file; unknown names fall through to None (no pretrained emb).
_emb_files = {
    'glove': '../../../data/embedding/glove.6B.100d.txt',
    'glove300d': '../../../data/embedding/glove.840B.300d.txt',
}
emb_file = _emb_files.get(emb)

char_emb_file = args.charemb.lower()
print('Char Embedding: ', char_emb_file)

name = 'LSTM'  # catnlp

# Model / training hyper-parameters for the LSTM run (300-d words, 600 hidden).
config = Config()
config.layers = 2
config.optim = 'Adam'
config.char_features = 'CNN'
config.word_emb_dim = 300
config.hidden_dim = 600
config.bid_flag = True
config.number_normalized = True

# Build vocabularies/alphabets over every split before instance generation.
data_initialization(config, train_file, dev_file, test_file)
config.gpu = gpu
config.word_features = name
print('Word features: ', config.word_features)

# Convert raw files to model-ready instances, one call per split.
for split_file, split_name in ((train_file, 'train'),
                               (dev_file, 'dev'),
                               (test_file, 'test')):
    config.generate_instance(split_file, split_name)

if emb_file:
    print('load word emb file...norm: ', config.norm_word_emb)
    # NOTE(review): 'pretain' spelling matches the Config API as invoked
    # throughout this project — do not "fix" without changing Config too.
    config.build_word_pretain_emb(emb_file)
if char_emb_file != 'none':
    print('load char emb file...norm: ', config.norm_char_emb)
    config.build_char_pretrain_emb(char_emb_file)