# Train a feed-forward (MLP) answer-selection model: parse CLI hyperparameters,
# prepare the experiment directory, load data, and build the Keras model.
parser = get_parser()
p = parser.parse_args()

# Parameters
BATCH_SIZE = p.batch_size
EPOCH = p.n_epochs
DROPOUT = p.dropout
LAYERS = p.layers
PATIENCE = p.patience
HIDDEN_SIZE = p.n_hidden

# Experiment output directory; FOOTPRINT encodes the hyperparameters so that
# artifacts from different runs do not collide.
PREFIX = 'exp/' + p.prefix + '/'
# Fix: use os.makedirs instead of os.system('mkdir -p ...') -- portable and not
# vulnerable to shell injection through the user-supplied --prefix value.
if not os.path.exists(PREFIX):
    os.makedirs(PREFIX)
FOOTPRINT = 'H' + str(HIDDEN_SIZE) + '_L' + str(LAYERS) + '_D' + str(DROPOUT)

### get data
X_tr, Y_tr, X_val, Y_val, X_test, Y_test = prepare_ass()
print('data loaded...')
# NOTE(review): assumes each X_tr element is indexable as [0].shape[2] to yield
# the feature dimension -- confirm against prepare_ass().
DIM = X_tr[0][0].shape[2]
N_tr = len(X_tr)
N_val = len(X_val)

print('building model...')
model = Sequential()
# Input layer with light L1+L2 weight regularization.
model.add(Dense(HIDDEN_SIZE, input_shape=(DIM,),
                W_regularizer=l1l2(l1=0.00001, l2=0.00001)))
model.add(Activation('relu'))
# Stack the remaining LAYERS-1 hidden layers.
# NOTE(review): the collapsed original is ambiguous about whether Dropout sits
# inside this loop; kept inside, matching the one-line statement order.
for layer in xrange(LAYERS - 1):
    model.add(Dense(HIDDEN_SIZE, W_regularizer=l1l2(l1=0.00001, l2=0.00001)))
    model.add(Activation('relu'))
    model.add(Dropout(DROPOUT))
# Parameters (p is the parsed argparse namespace, created earlier in the file)
BATCH_SIZE = p.batch_size
EPOCH = p.n_epochs
DROPOUT = p.dropout
LAYERS = p.layers
PATIENCE = p.patience
HIDDEN_SIZE = p.n_hidden
FINETUNE = p.finetune

# Experiment output directory; FOOTPRINT encodes model/unit/hyperparameters,
# training-set key, and the finetune flag so runs are uniquely named.
PREFIX = 'exp/' + p.prefix + '/'
# Fix: use os.makedirs instead of os.system('mkdir -p ...') -- portable and not
# vulnerable to shell injection through the user-supplied --prefix value.
if not os.path.exists(PREFIX):
    os.makedirs(PREFIX)
FOOTPRINT = ('M' + str(p.model) + '_U' + str(p.unit) + '_H' + str(HIDDEN_SIZE)
             + '_L' + str(LAYERS) + '_D' + str(DROPOUT) + '_TR' + p.train
             + '_FT' + str(FINETUNE))

### get data
# Questions/answers come padded to fixed lengths; also returns per-answer
# lengths, the vocabulary maps, and pretrained embedding weights.
[X_tr_q, X_tr_a], Y_tr, [X_val_q, X_val_a], Y_val, [X_test_q, X_test_a], Y_test, \
    [tr_length_a, val_length_a, test_length_a], [word_idx, idx_word], \
    embedding_weights = prepare_ass(train=TR[p.train], mini_batch=True,
                                    fp=PREFIX + FOOTPRINT, finetune=FINETUNE)
# Bucket training examples by answer length (step 20) to reduce padding waste;
# only input index 1 (the answers) is bucketed.
b_X_tr, b_Y_tr = distribute_buckets(tr_length_a, [X_tr_q, X_tr_a], [Y_tr],
                                    step_size=20, x_set=set([1]), y_set=set())
print('data loaded...')

if FINETUNE:
    # When fine-tuning embeddings, inputs are word indices, not vectors.
    DIM = 0
else:
    DIM = X_tr_q.shape[2]
MAX_Q = X_tr_q.shape[1]
MAX_A = X_tr_a.shape[1]
print('building model...')
# Train a feed-forward (MLP) answer-selection model: parse CLI hyperparameters,
# prepare the experiment directory, load data, and build the Keras model.
parser = get_parser()
p = parser.parse_args()

# Parameters
BATCH_SIZE = p.batch_size
EPOCH = p.n_epochs
DROPOUT = p.dropout
LAYERS = p.layers
PATIENCE = p.patience
HIDDEN_SIZE = p.n_hidden

# Experiment output directory; FOOTPRINT encodes the hyperparameters so that
# artifacts from different runs do not collide.
PREFIX = 'exp/' + p.prefix + '/'
# Fix: use os.makedirs instead of os.system('mkdir -p ...') -- portable and not
# vulnerable to shell injection through the user-supplied --prefix value.
if not os.path.exists(PREFIX):
    os.makedirs(PREFIX)
FOOTPRINT = 'H' + str(HIDDEN_SIZE) + '_L' + str(LAYERS) + '_D' + str(DROPOUT)

### get data
X_tr, Y_tr, X_val, Y_val, X_test, Y_test = prepare_ass()
print('data loaded...')
# NOTE(review): assumes each X_tr element is indexable as [0].shape[2] to yield
# the feature dimension -- confirm against prepare_ass().
DIM = X_tr[0][0].shape[2]
N_tr = len(X_tr)
N_val = len(X_val)

print('building model...')
model = Sequential()
# Input layer with light L1+L2 weight regularization.
model.add(
    Dense(HIDDEN_SIZE,
          input_shape=(DIM, ),
          W_regularizer=l1l2(l1=0.00001, l2=0.00001)))
model.add(Activation('relu'))
# Stack the remaining LAYERS-1 hidden layers.
for layer in xrange(LAYERS - 1):
    model.add(Dense(HIDDEN_SIZE, W_regularizer=l1l2(l1=0.00001, l2=0.00001)))
# (Fragment: p, LAYERS, DROPOUT are assigned earlier in the file.)
HIDDEN_SIZE = p.n_hidden
FINETUNE = p.finetune

# Experiment output directory; FOOTPRINT encodes model/unit/hyperparameters,
# training-set key, and the finetune flag so runs are uniquely named.
PREFIX = 'exp/' + p.prefix + '/'
# Fix: use os.makedirs instead of os.system('mkdir -p ...') -- portable and not
# vulnerable to shell injection through the user-supplied --prefix value.
if not os.path.exists(PREFIX):
    os.makedirs(PREFIX)
FOOTPRINT = ('M' + str(p.model) + '_U' + str(p.unit) + '_H' + str(HIDDEN_SIZE)
             + '_L' + str(LAYERS) + '_D' + str(DROPOUT) + '_TR' + p.train
             + '_FT' + str(FINETUNE))

### get data
# Questions/answers come padded to fixed lengths; also returns per-answer
# lengths, the vocabulary maps, and pretrained embedding weights.
[X_tr_q, X_tr_a], Y_tr, [X_val_q, X_val_a], Y_val, [X_test_q, X_test_a], Y_test, \
    [tr_length_a, val_length_a, test_length_a], [word_idx, idx_word], \
    embedding_weights = prepare_ass(train=TR[p.train], mini_batch=True,
                                    fp=PREFIX + FOOTPRINT, finetune=FINETUNE)
# Bucket training examples by answer length (step 20) to reduce padding waste;
# only input index 1 (the answers) is bucketed.
b_X_tr, b_Y_tr = distribute_buckets(tr_length_a, [X_tr_q, X_tr_a], [Y_tr],
                                    step_size=20, x_set=set([1]), y_set=set())
print('data loaded...')

if FINETUNE:
    # When fine-tuning embeddings, inputs are word indices, not vectors.
    DIM = 0
else:
    DIM = X_tr_q.shape[2]
MAX_Q = X_tr_q.shape[1]