def ADA(self, num_passes=20):
    """Train the model with ADAGrad across a sweep of learning rates.

    Sweeps the learning rate eta over ``10 ** np.linspace(-3, 1, 5)``
    (i.e. 1e-3, 1e-2, 1e-1, 1, 10). For each rate a fresh ADAGrad
    optimizer is constructed and run for ``num_passes`` passes, and the
    mean objective value at the last evaluation is printed.

    Side effects: overwrites ``self.learner_name`` and ``self.optimizer``
    on every iteration, so after the loop they reflect the *last*
    (largest) learning rate tried.

    Parameters
    ----------
    num_passes : int, optional
        Number of optimization passes per learning rate (default 20).
    """
    for eta in 10 ** np.linspace(-3, 1, 5):
        self.learner_name = "ADAGrad %.4f" % eta
        print("\n\n" + self.learner_name)
        # Fresh optimizer per rate; copy the flat init so runs don't share state.
        self.optimizer = ADAGrad(
            self.f_df_wrapper_flattened,
            self.xinit_flat.copy(),
            self.model.subfunction_references,
            learning_rate=eta,
        )
        # Return value (final parameter vector) was previously bound to an
        # unused local `x`; it is intentionally discarded here.
        self.optimizer.optimize(num_passes=num_passes)
        print(np.mean(self.optimizer.f), "average value at last evaluation")