def objective(params):
    """Bayes-opt objective for curriculum learning: minimise minus validation accuracy.

    Args:
        params: iterable of (curriculum_step, learning_rate, input_prob,
            hidden_prob, l2_reg) — the hyperparameters being searched.

    Returns:
        Negative mean of the top-10% validation accuracies (optimizers
        minimise, so maximising accuracy means minimising its negation).
    """
    curriculum_step, learning_rate, input_prob, hidden_prob, l2_reg = params

    stats, keys = curriculumLearning(
        epochs=0, repetitions=1,  # do not really matter
        curriculum_step=curriculum_step,
        repeat_school_class=True,
        shuffle_cur_curriculum=True,
        reverse_order=True,
        self_paced=True,
        keep_probabilities=[input_prob, hidden_prob],
        lamda2=l2_reg,
        learningRate=learning_rate)

    # save every time in case it crashes
    saveStatsCollection(
        filename='curr_learn_bay_opt_steps_repetitions_statsCollection.npy',
        key=(curriculum_step, learning_rate, input_prob, hidden_prob, l2_reg),
        stats=stats)

    if plotting:
        fig_1, ax_1, fig_2, ax_2 = plotStats(stats, keys)
        plt.show()

    validAccs = stats[:, -1]  # last column: validation accuracy per epoch
    # Guard against short runs: len // 10 == 0 would make [-0:] select the
    # WHOLE array (mean over everything instead of the best 10%). Matches
    # the max(..., 1) guard used by the sibling RNN objective.
    length10percent = max(len(validAccs) // 10, 1)
    best10percent = np.sort(validAccs)[-length10percent:]

    # We want to maximise validation accuracy, i.e. minimise minus validation accuracy
    return -np.mean(best10percent)
def objective(params):
    """Bayes-opt objective for student-network distillation: minimise minus validation accuracy.

    Args:
        params: iterable of (input_keep_prob, hidden_keep_prob, hidden_dim,
            lamda2) — the hyperparameters being searched.

    Returns:
        Negative mean of the top-10% validation accuracies (optimizers
        minimise, so maximising accuracy means minimising its negation).
    """
    input_keep_prob, hidden_keep_prob, hidden_dim, lamda2 = params
    epochs = 20
    learning_rate = 1e-4

    studentNN = StudentNN(batch_size=batch_size, rng=rng, dtype=curDtype,
                          config=config)
    stats, keys = studentNN.teach_student(
        hidden_dim=hidden_dim,
        lamda2=lamda2,
        learning_rate=learning_rate,
        epochs=epochs,
        input_keep_prob=input_keep_prob,
        hidden_keep_prob=hidden_keep_prob,
        dataset_filename=dataset_filename,
        logits_filename=logits_filename,
    )

    # save every time in case it crashes
    filename = statsCollectionFilename
    # The .npy file holds a pickled dict wrapped in a 0-d object array;
    # [()] unwraps it. allow_pickle=True is required since numpy 1.16.3.
    statsCollection = (np.load(filename, allow_pickle=True)[()]
                       if os.path.isfile(filename) else dict())
    statsCollection[tuple(params)] = stats
    np.save(filename, statsCollection)

    if plotting:
        fig_1, ax_1, fig_2, ax_2 = plotStats(stats, keys)
        plt.show()

    validAccs = stats[:, -1]  # last column: validation accuracy per epoch
    # Guard against short runs: len // 10 == 0 would make [-0:] select the
    # WHOLE array (mean over everything instead of the best 10%).
    length10percent = max(len(validAccs) // 10, 1)
    best10percent = np.sort(validAccs)[-length10percent:]

    # We want to maximise the MEAN validation accuracy,
    # i.e. minimise minus
    return -np.mean(best10percent)
def objective_min_epochs(params):
    """Bayes-opt objective: minimise the epoch count needed to reach 23% validation accuracy.

    Args:
        params: iterable of (state_size, num_steps, learning_rate) — the RNN
            hyperparameters being searched.

    Returns:
        The metric reported by rnnModel.run_until — per the comment below,
        the number of epochs required to hit the target accuracy (capped by
        maxEpochs), which the optimizer minimises directly.
    """
    (state_size, num_steps, learning_rate) = params
    targetValidAcc = 0.23
    maxEpochs = 20

    stats, metric = rnnModel.run_until(targetValidAcc=targetValidAcc,
                                       maxEpochs=maxEpochs,
                                       learning_rate=learning_rate,
                                       num_steps=num_steps,
                                       state_size=state_size)

    # save every time in case it crashes
    filename = stats_coll_filename
    # The .npy file holds a pickled dict wrapped in a 0-d object array;
    # [()] unwraps it. allow_pickle=True is required since numpy 1.16.3.
    statsCollection = (np.load(filename, allow_pickle=True)[()]
                       if os.path.isfile(filename) else dict())
    statsCollection[(state_size, num_steps, learning_rate)] = stats
    np.save(filename, statsCollection)

    if plotting:
        fig_1, ax_1, fig_2, ax_2 = plotStats(stats, DynStats.keys)
        plt.show()

    # We want to minimize the amount of epochs required to reach 23% accuracy
    return metric
def objective(params):
    """Bayes-opt objective for the RNN: minimise minus validation accuracy.

    Args:
        params: iterable of (state_size, num_steps, learning_rate) — the RNN
            hyperparameters being searched.

    Returns:
        Negative mean of the top-10% validation accuracies (optimizers
        minimise, so maximising accuracy means minimising its negation).
    """
    (state_size, num_steps, learning_rate) = params
    epochs = 20

    stats, keys = rnnModel.run_rnn(state_size=state_size,
                                   num_steps=num_steps,
                                   epochs=epochs,
                                   learning_rate=learning_rate)

    # save every time in case it crashes
    filename = stats_coll_filename
    # The .npy file holds a pickled dict wrapped in a 0-d object array;
    # [()] unwraps it. allow_pickle=True is required since numpy 1.16.3.
    statsCollection = (np.load(filename, allow_pickle=True)[()]
                       if os.path.isfile(filename) else dict())
    statsCollection[(state_size, num_steps, learning_rate)] = stats
    np.save(filename, statsCollection)

    if plotting:
        fig_1, ax_1, fig_2, ax_2 = plotStats(stats, keys)
        plt.show()

    # We want to maximise validation accuracy, i.e. minimise minus validation accuracy
    validAccs = stats[:, -1]  # last column: validation accuracy per epoch
    # max(..., 1) keeps the slice non-empty for runs shorter than 10 epochs
    length10percent = max(len(validAccs) // 10, 1)
    best10percent = np.sort(validAccs)[-length10percent:]
    return -np.mean(best10percent)
def onTrainEnd(stats, logits_dict):
    """Training-complete callback: render the run's stat plots when plotting is enabled.

    Args:
        stats: statistics array for the finished run, as consumed by plotStats.
        logits_dict: unused here; part of the callback signature.
    """
    if not plotting:
        return
    fig_1, ax_1, fig_2, ax_2 = plotStats(stats, DynStats.keys)
    plt.show()