def test_cmaes_random_quadratic():
    # -- instantiating the function
    myfct, opt_vec = get_func(n)

    # -- Testing!
    res = cmaes(n, myfct, stop)
    assert np.abs(res - opt_vec).sum() < 1e-3
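# The get_func helper used above is not shown in this listing. The sketch below
# is a hypothetical stand-in (the name, the scaling of the Hessian, and the
# [0, 1) optimum location are assumptions) for a random-quadratic generator that
# returns a callable together with its known minimizer, assuming the optimizer
# minimizes its objective.
import numpy as np


def get_func_sketch(n, seed=None):
    rng = np.random.RandomState(seed)
    A = rng.randn(n, n)
    A = A.T.dot(A) + n * np.eye(n)   # random positive-definite Hessian
    opt_vec = rng.rand(n)            # known location of the minimum

    def myfct(x):
        d = np.asarray(x) - opt_vec
        return d.dot(A).dot(d)       # value 0 exactly at opt_vec

    return myfct, opt_vec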
def stop_factory(opt_vec=None, eps=1e-3):
    # the eps default is assumed to match the 1e-3 tolerance used in the test
    def stop(g, m, opt_vec=opt_vec, eps=eps):
        assert m.ndim == 1
        assert opt_vec is not None

        # give up after a generation budget that grows with dimensionality
        max_g = 2e2 * m.size ** 2
        if g > max_g:
            return False

        # keep iterating while the mean is still farther than eps from the optimum
        error = np.abs(m - opt_vec).mean()
        if error > eps:
            return True
        else:
            return False

    return stop


# -- Running convergence tests with dimensionality
convergence_histories = {}
for n in N_L:
    key = '%i' % n
    myfct, opt_vec = get_func(n)
    mystop = stop_factory(opt_vec=opt_vec)
    m_opt, m_history = cmaes(n, myfct, mystop, range_min=0, range_max=0.3 * n)
    convergence_histories[key] = m_history

# -- dump data
cPickle.dump(convergence_histories, open('convergence_history.pkl', 'w'))
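# -- optional sanity check (hypothetical follow-up, assuming each m_history is
# -- a sequence of mean vectors): reload the dump and summarize every run
loaded = cPickle.load(open('convergence_history.pkl'))
for key in sorted(loaded, key=int):
    m_history = loaded[key]
    print('n=%s: %i recorded means, final mean norm %.3e'
          % (key, len(m_history), np.linalg.norm(np.asarray(m_history[-1]))))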
    return False


def test_cmaes_random_quadratic():
    # -- instantiating the function
    myfct, opt_vec = get_func(n)

    # -- Testing!
    res = cmaes(n, myfct, stop)
    assert np.abs(res - opt_vec).sum() < 1e-3


if __name__ == "__main__":

    def get_stop(max_iterations=1000):
        # stop criterion based only on an iteration budget: keep iterating
        # while the generation counter is within the budget
        def mystop(g):
            if g <= max_iterations:
                return True
            else:
                return False
        return mystop

    n = 50
    max_iterations = 2e1 * n ** 2
    myfct, opt_vec = get_func(n)
    stop = get_stop(max_iterations=max_iterations)
    res = cmaes(n, myfct, stop, range_min=0, range_max=0.3 * n)
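    # -- hypothetical follow-up check on the run above, mirroring the tolerance
    # -- used in test_cmaes_random_quadratic
    print("mean abs error vs. known optimum: %.3e" % np.abs(res - opt_vec).mean())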
print("predicted num: %d" % pred) plt.figure(1) plt.clf() plt.imshow(initX.reshape(28, 28), cmap=plt.cm.gray) plt.colorbar() plt.title("predicted num: %d" % pred) plt.show() mlpfunc(initX, entry=None) #%% from cmaes import cmaes #%% XOut = cmaes(fitnessfunc=mlpfunc, N=784, initX=initX, maximize=True, save=True, savemrk='mlp4-out1', stopsigma=1e-5) # After see the data, I found the `sigma ` keeps going up and diverge to 1E19 after the first few trials # so the data is not good on this sense #%% XOut2 = cmaes(fitnessfunc=mlpfunc, N=784, initX=initX, maximize=True, save=True, savemrk='mlp4-out1', stopsigma=1e-5) #%%