def reconstruct_potential_sd(grid_in, refdens_in, startpot_in, occs_in,
                             denserr=1e-4, method=None):
    """Reconstruct the potential that reproduces the reference density.

    The optimization runs on the r-weighted potential startpot_in*grid_in.r;
    the result is returned relative to startpot_in.
    Note: denserr is currently unused.
    """
    from bfgs_only_fprime import fmin_bfgs_onlygrad, fmin_newton_onlygrad
    from scipy.optimize import fmin_bfgs

    # Small lambd regularizes the reconstruction; set lambd=0.0 to disable.
    funcs = CalcFuncGrad(grid_in, occs_in, refdens_in, startpot_in * grid_in.r,
                         lambd=1e-8)
    #funcs = CalcFuncGrad(grid_in, occs_in, refdens_in, startpot_in*grid_in.r, lambd=0.0)

    if (method is None) or (method == 'BFGS'):
        # Standard SciPy BFGS on the Lagrangian, with analytic gradient.
        recpot = fmin_bfgs(funcs.lagrangian, startpot_in * grid_in.r,
                           fprime=funcs.gradient, callback=funcs.info,
                           norm=2, disp=True, gtol=1e-8) / grid_in.r
    elif method == 'BFGS_OnlyGrad':
        # Gradient-only BFGS variant that never evaluates the function value.
        recpot = fmin_bfgs_onlygrad(funcs.gradient, startpot_in * grid_in.r,
                                    invhess=funcs.invhess, callback=funcs.info,
                                    gtol=1e-10) / grid_in.r
    else:
        raise ValueError('Unknown optimization method: %s' % method)

    return recpot - startpot_in
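# A minimal usage sketch for reconstruct_potential_sd. The names ``grid``,
# ``refdens``, ``startpot`` and ``occs`` are hypothetical: a radial grid
# object with attributes ``r`` and ``N``, a reference density on that grid,
# an initial potential, and orbital occupations; none are defined here.
#
#   vrec = reconstruct_potential_sd(grid, refdens, startpot, occs,
#                                   method='BFGS_OnlyGrad')
#   vtot = startpot + vrec   # full reconstructed potential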
def reconstruct_spinpotential_sd(grid_in, refdens_alpha_in, refdens_beta_in,
                                 startpot_in, occs_alpha_in, occs_beta_in,
                                 lamb_tot=0.0, lamb_spin=0.0, gradnorm=1e-8):
    """Reconstruct alpha and beta spin potentials that reproduce the
    reference spin densities.

    Returns a (recpot_alpha, recpot_beta) pair, each relative to startpot_in.
    """
    from bfgs_only_fprime import fmin_bfgs_onlygrad, fmin_newton_onlygrad
    from scipy.optimize import fmin_bfgs

    funcs = CalcFuncGradSpin(grid_in, occs_alpha_in, occs_beta_in,
                             refdens_alpha_in, refdens_beta_in,
                             startpot_in * grid_in.r, lamb_tot, lamb_spin)

    # Alpha and beta potentials are optimized simultaneously as one
    # concatenated vector of length 2*grid_in.N.
    startpot = numpy.concatenate([startpot_in * grid_in.r,
                                  startpot_in * grid_in.r])
    recpot = fmin_bfgs_onlygrad(funcs.gradient, startpot,
                                invhess=funcs.invhess, callback=funcs.info,
                                gtol=gradnorm)

    return (recpot[:grid_in.N] / grid_in.r - startpot_in,
            recpot[grid_in.N:] / grid_in.r - startpot_in)
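# Sketch for the spin-resolved variant (same hypothetical grid, density and
# occupation objects as above, split into alpha and beta components):
#
#   vrec_a, vrec_b = reconstruct_spinpotential_sd(grid, dens_a, dens_b,
#                                                 startpot, occs_a, occs_b,
#                                                 lamb_tot=0.0, lamb_spin=0.0)
#   # vrec_a and vrec_b are the alpha/beta corrections to startpot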
def reconstruct_restrpotential_sd(grid_in, refdens_tot_in, startpot_in,
                                  occs_in, ons, lamb, gradnorm=1e-8):
    """Reconstruct a restricted (spin-independent) potential that reproduces
    the reference total density.

    Returns the reconstructed potential relative to startpot_in.
    """
    from bfgs_only_fprime import fmin_bfgs_onlygrad, fmin_newton_onlygrad
    from scipy.optimize import fmin_bfgs

    funcs = CalcFuncGrad(grid_in, occs_in, refdens_tot_in,
                         startpot_in * grid_in.r, ons, lamb)
    recpot = fmin_bfgs_onlygrad(funcs.gradient, startpot_in * grid_in.r,
                                invhess=funcs.invhess, callback=funcs.info,
                                gtol=gradnorm)

    return recpot / grid_in.r - startpot_in
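# Sketch for the restricted variant (``ons`` and ``lamb`` are passed straight
# through to CalcFuncGrad; the call below uses placeholder values, not
# recommendations):
#
#   vrec = reconstruct_restrpotential_sd(grid, dens_tot, startpot, occs,
#                                        ons, lamb, gradnorm=1e-8)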