lstm.use_embedding_pre_train = exp.pp_data.use_embedding
lstm.embed_trainable = False

neuronios_by_layer = [16, 32, 64, 100, 128]
epochs = [16, 32, 64, 100]
batch_sizes = [20, 40, 80, 160]
dropouts = [0.2, 0.3, 0.5]

for neuronios in neuronios_by_layer:
    for batch_size in batch_sizes:
        for epoch in epochs:
            for dropout in dropouts:
                lstm.epochs = epoch
                lstm.batch_size = batch_size
                lstm.patience_train = epoch / 2
                exp.experiment_name = 'lstm_exp9_var_L2' + '_N' + str(neuronios) + '_B' + str(batch_size) + \
                                      '_E' + str(epoch) + '_D' + str(dropout)

                # Two stacked LSTM layers over a frozen embedding layer; sigmoid output
                # for binary classification.
                lstm.model = Sequential()
                lstm.model.add(Embedding(exp.pp_data.vocabulary_size, exp.pp_data.embedding_size,
                                         trainable=lstm.embed_trainable))
                lstm.model.add(LSTM(neuronios, activation='tanh', dropout=dropout,
                                    recurrent_dropout=dropout, return_sequences=True))
                lstm.model.add(LSTM(neuronios, activation='tanh', dropout=dropout,
                                    recurrent_dropout=dropout))
                lstm.model.add(Dense(1, activation='sigmoid'))

                time_ini_exp = datetime.datetime.now()
                # exp.k_fold_cross_validation(lstm)
                exp.test_hypeparams(lstm)
                exp.set_period_time_end(time_ini_exp, 'Total experiment')
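# The four nested loops above sweep the full hyperparameter grid:
# 5 * 4 * 4 * 3 = 240 configurations. A flatter, equivalent formulation
# (a sketch only, assuming the same exp/lstm objects as above) expresses
# the sweep as a single loop with itertools.product:
#
#     import itertools
#     for neuronios, batch_size, epoch, dropout in itertools.product(
#             neuronios_by_layer, batch_sizes, epochs, dropouts):
#         ...  # configure, build, and evaluate the model exactly as above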
# Train
neuronios_by_layer = [16, 32]
epochs = [16, 32, 64, 96, 128]
batch_sizes = [20, 40, 80]

np.random.seed(dn.SEED)

time_ini_rep = datetime.datetime.now()
x_train, y_train, x_valid, y_valid, num_words, embedding_matrix = exp.pp_data.load_data()
exp.set_period_time_end(time_ini_rep, 'Load data')

for neuronios in neuronios_by_layer:
    for batch_size in batch_sizes:
        for epoch in epochs:
            exp.experiment_name = 'lstm_exp14_L3' + '_N' + str(neuronios) + '_B' + str(batch_size) + \
                                  '_E' + str(epoch)
            lstm.epochs = epoch
            lstm.batch_size = batch_size
            lstm.patience_train = epoch / 2

            # Input geometry: features per timestep and sequence length per sample.
            data_dim = exp.pp_data.max_terms_by_post
            timesteps = exp.pp_data.max_posts

            lstm.model = Sequential()
            lstm.model.add(LSTM(neuronios, activation='tanh', dropout=0.2,
                                recurrent_dropout=0.2, return_sequences=True, stateful=True,
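                                # NOTE (assumption, not from the source): with stateful=True,
                                # Keras requires the first layer to fix the batch-level input
                                # shape, typically via
                                # batch_input_shape=(batch_size, timesteps, data_dim).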