def compute_step(cls):
    """ Computes next trial step length
    """
    unix.cd(cls.path)
    m0 = loadnpy('m_new')
    p = loadnpy('p_new')
    f0 = loadtxt('f_new')
    g0 = loadtxt('s_new')
    x = cls.step_lens()
    f = cls.func_vals()

    # compute trial step length
    if PAR.SRCHTYPE == 'Backtrack':
        alpha = lib.backtrack2(f0, g0, x[1], f[1], b1=0.1, b2=0.5)

    elif PAR.SRCHTYPE == 'Bracket':
        FACTOR = 2.
        if any(f[1:] < f[0]) and (f[-2] < f[-1]):
            alpha = lib.polyfit2(x, f)
        elif any(f[1:] < f[0]):
            alpha = loadtxt('alpha') * FACTOR
        else:
            alpha = loadtxt('alpha') * FACTOR**-1

    elif PAR.SRCHTYPE == 'Fixed':
        # number of trial steps taken so far in the current search
        step = len(cls.search_history) - 1
        alpha = cls.step_ratio * (step + 1) * PAR.STEPLEN

    else:
        raise ValueError

    # write trial model
    savetxt('alpha', alpha)
    savenpy('m_try', m0 + p * alpha)
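# lib.backtrack2 and lib.polyfit2 are interpolation helpers defined
# elsewhere. Minimal sketches consistent with how they are called here
# (details assumed, not taken from the source): backtrack2 minimizes the
# quadratic through (0, f0) with slope g0 and through (x1, f1), safeguarded
# to the interval [b1*x1, b2*x1]; polyfit2 fits a parabola to the trial
# points bracketing the smallest function value and returns its minimizer.

def backtrack2(f0, g0, x1, f1, b1=0.1, b2=0.5):
    # minimizer of the interpolating quadratic, clamped to the safeguard
    # interval (assumes x1 > 0)
    x2 = -g0 * x1**2 / (2. * (f1 - f0 - g0 * x1))
    return min(max(x2, b1 * x1), b2 * x1)

def polyfit2(x, f):
    # parabola through the three points surrounding the smallest f;
    # assumes the minimum is bracketed, i.e. not at either endpoint
    i = np.argmin(f)
    p = np.polyfit(x[i-1:i+2], f[i-1:i+2], 2)
    if p[0] <= 0:
        raise ValueError('could not fit a convex parabola')
    return -p[1] / (2. * p[0])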
def initial_step(self):
    """ Determines first trial step in line search; see e.g. Nocedal and
      Wright, 2nd ed., section 3.5
    """
    alpha = loadtxt('alpha')
    s_new = loadtxt('s_new')
    s_old = loadtxt('s_old')
    s_ratio = s_new / s_old
    return 2. * s_ratio * alpha
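# Worked example (illustrative numbers): if the previous accepted step was
# alpha = 0.10 and the directional derivative halved between iterations
# (s_new/s_old = 0.5), the first trial step of the new line search is
# 2. * 0.5 * 0.10 = 0.10.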
def update_status(self):
    """ Updates line search status

      Maintains line search history by keeping track of step length and
      function value from each trial model evaluation. From line search
      history, determines whether stopping criteria have been satisfied.
    """
    unix.cd(PATH.OPTIMIZE)
    x_ = loadtxt('alpha')
    f_ = loadtxt('f_try')
    if np.isnan(f_):
        raise ValueError

    # update search history
    self.search_history += [[x_, f_]]
    self.step_count += 1

    x = self.step_lens()
    f = self.func_vals()
    fmin = f.min()
    imin = f.argmin()

    # is current step length the best so far?
    vals = self.func_vals(sort=False)
    if np.all(vals[-1] < vals[:-1]):
        self.isbest = 1

    # are stopping criteria satisfied?
    if PAR.LINESEARCH == 'Fixed':
        if (fmin < f[0]) and any(fmin < f[imin:]):
            self.isdone = 1
    #elif PAR.LINESEARCH == 'Bracket' or \
    #        self.iter == 1 or self.restarted:
    elif PAR.LINESEARCH == 'Bracket':
        if self.isbrak:
            self.isdone = 1
        elif (fmin < f[0]) and any(fmin < f[imin:]):
            self.isbrak = 1
    elif PAR.LINESEARCH == 'Backtrack':
        if fmin < f[0]:
            self.isdone = 1

    # update log
    self.stepwriter(steplen=x_, funcval=f_)

    return self.isdone
def update_status(self):
    """ Updates line search status

      Maintains line search history by keeping track of step length and
      function value from each trial model evaluation. From line search
      history, determines whether stopping criteria have been satisfied.
    """
    unix.cd(PATH.OPTIMIZE)
    x_ = loadtxt('alpha')
    f_ = loadtxt('f_try')
    if np.isnan(f_):
        raise ValueError

    # update search history
    self.search_history += [[x_, f_]]
    self.step_count += 1

    x = self.step_lens()
    f = self.func_vals()
    fmin = f.min()
    imin = f.argmin()

    # is current step length the best so far?
    vals = self.func_vals(sort=False)
    if np.all(vals[-1] < vals[:-1]):
        self.isbest = 1

    # are stopping criteria satisfied?
    if PAR.LINESEARCH == 'Fixed':
        if (fmin < f[0]) and any(fmin < f[imin:]):
            self.isdone = 1
    elif PAR.LINESEARCH == 'Bracket' or \
            self.iter == 1 or self.restarted:
        if self.isbrak:
            self.isdone = 1
        elif (fmin < f[0]) and any(fmin < f[imin:]):
            self.isbrak = 1
    elif PAR.LINESEARCH == 'Backtrack':
        if fmin < f[0]:
            self.isdone = 1

    # update log
    self.stepwriter(steplen=x_, funcval=f_)

    return self.isdone
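# The step_lens/func_vals accessors used throughout these routines are not
# shown in this excerpt. Minimal sketches consistent with how they are
# called (details assumed, not taken from the source): both unpack
# search_history and, by default, return values ordered by step length, so
# that f[0] is the misfit at zero step and f[-1] the misfit at the largest
# trial step.

def step_lens(self, sort=True):
    # step lengths of all trial models evaluated so far
    x, f = zip(*self.search_history)
    x = np.array(x)
    if sort:
        return x[x.argsort()]
    return x

def func_vals(self, sort=True):
    # function values of all trial models evaluated so far, optionally
    # ordered by the corresponding step length
    x, f = zip(*self.search_history)
    x, f = np.array(x), np.array(f)
    if sort:
        return f[x.argsort()]
    return f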
def compute_direction(cls):
    """ Computes model update direction from stored function and gradient
      values
    """
    unix.cd(cls.path)
    m_new = loadnpy('m_new')
    f_new = loadtxt('f_new')
    g_new = loadnpy('g_new')

    if PAR.SCHEME == 'GradientDescent':
        p_new = -g_new

    elif PAR.SCHEME == 'ConjugateGradient':
        # compute NLCG update
        p_new = cls.NLCG.compute()

    elif PAR.SCHEME == 'QuasiNewton':
        # compute L-BFGS update
        if cls.iter == 1:
            p_new = -g_new
        else:
            cls.LBFGS.update()
            p_new = -cls.LBFGS.solve()

    # save results
    unix.cd(cls.path)
    savenpy('p_new', p_new)
    savetxt('s_new', np.dot(g_new, p_new))
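# NLCG.compute() is defined elsewhere. In the conjugate gradient scheme, a
# standard choice for the update parameter is Pollak-Ribiere; a minimal
# sketch (the helper name and the absence of preconditioning are
# assumptions, not taken from the source):

def pollak_ribere(g_new, g_old):
    # beta = g_new . (g_new - g_old) / (g_old . g_old)
    return np.dot(g_new, g_new - g_old) / np.dot(g_old, g_old)

# so that, away from restarts, p_new = -g_new + beta * p_old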
def test_saveloadtxt(self):
    tmp_file = NamedTemporaryFile(mode='wb', delete=False)
    tmp_file.close()
    x = 3.14159265359
    tools.savetxt(tmp_file.name, x)
    self.assertAlmostEqual(x, tools.loadtxt(tmp_file.name), 6)
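# The tools module under test is not shown. A sketch of scalar text I/O
# helpers consistent with this round-trip test (the exact format string is
# an assumption; any format retaining six or more decimal places keeps the
# round-trip within the test's tolerance):

def savetxt(filename, v):
    # write a single scalar value as text
    np.savetxt(filename, [v], '%11.6e')

def loadtxt(filename):
    # read a single scalar value from text
    return float(np.loadtxt(filename))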
def compute_step(self):
    """ Computes next trial step length
    """
    unix.cd(PATH.OPTIMIZE)
    m = self.load('m_new')
    g = self.load('g_new')
    p = self.load('p_new')
    s = loadtxt('s_new')

    norm_m = max(abs(m))
    norm_p = max(abs(p))
    p_ratio = float(norm_m / norm_p)

    x = self.step_lens()
    f = self.func_vals()

    # compute trial step length
    if PAR.LINESEARCH == 'Fixed':
        alpha = p_ratio * (self.step_count + 1) * PAR.STEPINIT
    #elif PAR.LINESEARCH == 'Bracket' or \
    #        self.iter == 1 or self.restarted:
    elif PAR.LINESEARCH == 'Bracket':
        if any(f[1:] < f[0]) and (f[-2] < f[-1]):
            alpha = polyfit2(x, f)
        elif any(f[1:] <= f[0]):
            alpha = loadtxt('alpha') * PAR.STEPFACTOR**-1
        else:
            alpha = loadtxt('alpha') * PAR.STEPFACTOR
    elif PAR.LINESEARCH == 'Backtrack':
        # calculate slope along 1D profile
        slope = s / self.dot(g, g)**0.5
        if PAR.ADHOCFACTOR:
            slope *= PAR.ADHOCFACTOR
        alpha = backtrack2(f[0], slope, x[1], f[1], b1=0.1, b2=0.5)

    # write trial model corresponding to chosen step length
    savetxt('alpha', alpha)
    self.save('m_try', m + alpha * p)
def compute_step(self):
    """ Computes next trial step length
    """
    unix.cd(PATH.OPTIMIZE)
    m = self.load('m_new')
    g = self.load('g_new')
    p = self.load('p_new')
    s = loadtxt('s_new')

    norm_m = max(abs(m))
    norm_p = max(abs(p))
    p_ratio = float(norm_m / norm_p)

    x = self.step_lens()
    f = self.func_vals()

    # compute trial step length
    if PAR.LINESEARCH == 'Fixed':
        alpha = p_ratio * (self.step_count + 1) * PAR.STEPINIT
    elif PAR.LINESEARCH == 'Bracket' or \
            self.iter == 1 or self.restarted:
        if any(f[1:] < f[0]) and (f[-2] < f[-1]):
            alpha = polyfit2(x, f)
        elif any(f[1:] <= f[0]):
            alpha = loadtxt('alpha') * PAR.STEPFACTOR**-1
        else:
            alpha = loadtxt('alpha') * PAR.STEPFACTOR
    elif PAR.LINESEARCH == 'Backtrack':
        # calculate slope along 1D profile
        slope = s / self.dot(g, g)**0.5
        if PAR.ADHOCFACTOR:
            slope *= PAR.ADHOCFACTOR
        alpha = backtrack2(f[0], slope, x[1], f[1], b1=0.1, b2=0.5)

    # write trial model corresponding to chosen step length
    savetxt('alpha', alpha)
    self.save('m_try', m + alpha * p)
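# Note on the bracketing branch above: with f ordered by step length,
# any(f[1:] < f[0]) means some trial step has reduced the misfit, and
# f[-2] < f[-1] means the misfit has turned upward again at the largest
# step, so a minimum has been bracketed and can be located by parabolic
# interpolation. Otherwise the previous trial step is scaled by
# PAR.STEPFACTOR or its inverse to shrink or extend the search interval.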
def search_status(cls):
    """ Determines status of line search
    """
    unix.cd(cls.path)
    f0 = loadtxt('f_new')
    g0 = loadtxt('s_new')
    x_ = loadtxt('alpha')
    f_ = loadtxt('f_try')
    if np.isnan(f_):
        raise ValueError

    cls.search_history += [[x_, f_]]

    x = cls.step_lens()
    f = cls.func_vals()

    # is current step length the best so far?
    vals = cls.func_vals(sort=False)
    if np.all(vals[-1] < vals[:-1]):
        cls.isbest = 1

    # are stopping criteria satisfied?
    if PAR.SRCHTYPE == 'Backtrack':
        if any(f[1:] < f[0]):
            cls.isdone = 1
    elif PAR.SRCHTYPE == 'Bracket':
        if cls.isbrak:
            cls.isbest = 1
            cls.isdone = 1
        elif any(f[1:] < f[0]) and (f[-2] < f[-1]):
            cls.isbrak = 1
    elif PAR.SRCHTYPE == 'Fixed':
        if any(f[1:] < f[0]) and (f[-2] < f[-1]):
            cls.isdone = 1

    cls.writer([], x_, f_)

    return cls.isdone, cls.isbest
def __init__(self, path='.', load=loadnpy, save=savenpy, thresh=1.,
             maxiter=np.inf, precond=None):
    self.path = path
    self.load = load
    self.save = save
    self.maxiter = maxiter
    self.thresh = thresh
    self.precond = precond

    try:
        self.iter = loadtxt(self.path + '/' + 'NLCG/iter')
    except IOError:
        unix.mkdir(self.path + '/' + 'NLCG')
        self.iter = 0
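# One plausible use of thresh (an assumption; the restart logic itself is
# not part of this excerpt) is a Powell-style restart test, under which
# NLCG falls back to a steepest-descent step whenever successive gradients
# are far from orthogonal:

def check_conjugacy(self, g_new, g_old):
    # restart when |g_new . g_old| / (g_new . g_new) exceeds thresh
    theta = abs(np.dot(g_new, g_old)) / np.dot(g_new, g_new)
    return theta > self.thresh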
def finalize_search(self):
    """ Cleans working directory and writes updated model
    """
    unix.cd(PATH.OPTIMIZE)
    m = self.load('m_new')
    g = self.load('g_new')
    p = self.load('p_new')
    s = loadtxt('s_new')

    x = self.step_lens()
    f = self.func_vals()

    # clean working directory
    unix.rm('alpha')
    unix.rm('m_try')
    unix.rm('f_try')
    if self.iter > 1:
        unix.rm('m_old')
        unix.rm('f_old')
        unix.rm('g_old')
        unix.rm('p_old')
        unix.rm('s_old')
    unix.mv('m_new', 'm_old')
    unix.mv('f_new', 'f_old')
    unix.mv('g_new', 'g_old')
    unix.mv('p_new', 'p_old')
    unix.mv('s_new', 's_old')

    # write updated model
    alpha = x[f.argmin()]
    savetxt('alpha', alpha)
    self.save('m_new', m + alpha * p)
    savetxt('f_new', f.min())

    # append latest statistics
    self.writer('factor', -self.dot(g, g)**-0.5 * (f[1] - f[0]) / (x[1] - x[0]))
    self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
    self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
    self.writer('misfit', f[0])
    self.writer('restarted', self.restarted)
    self.writer('slope', (f[1] - f[0]) / (x[1] - x[0]))
    self.writer('step_count', self.step_count)
    self.writer('step_length', x[f.argmin()])
    self.writer('theta', 180. * np.pi**-1 * angle(p, -g))

    self.stepwriter.newline()
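# dot and angle are helpers defined elsewhere. Minimal sketches consistent
# with their use above (details assumed, not taken from the source): an
# inner product, and the angle in radians between the search direction and
# the steepest-descent direction.

def dot(self, x, y):
    # inner product of two model-sized vectors
    return np.dot(np.squeeze(x), np.squeeze(y))

def angle(x, y):
    # angle between vectors x and y, in radians
    return np.arccos(np.dot(x, y) / (np.linalg.norm(x) * np.linalg.norm(y)))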
def initialize_search(cls):
    """ Determines initial step length for line search
    """
    unix.cd(cls.path)

    if cls.iter == 1:
        s_new = loadtxt('s_new')
        f_new = loadtxt('f_new')
    else:
        s_old = loadtxt('s_old')
        s_new = loadtxt('s_new')
        f_old = loadtxt('f_old')
        f_new = loadtxt('f_new')
        alpha = loadtxt('alpha')
    m = loadnpy('m_new')
    p = loadnpy('p_new')

    # reset search history
    cls.search_history = [[0., f_new]]
    cls.isdone = 0
    cls.isbest = 0
    cls.isbrak = 0

    # determine initial step length
    len_m = max(abs(m))
    len_d = max(abs(p))
    cls.step_ratio = float(len_m / len_d)

    if cls.iter == 1:
        assert PAR.STEPLEN != 0.
        alpha = PAR.STEPLEN * cls.step_ratio
    elif PAR.SRCHTYPE in ['Bracket']:
        alpha *= 2. * s_old / s_new
    elif PAR.SCHEME in ['GradientDescent', 'ConjugateGradient']:
        alpha *= 2. * s_old / s_new
    else:
        alpha = 1.

    # ad hoc scaling
    if PAR.ADHOCSCALING:
        alpha *= PAR.ADHOCSCALING

    # limit maximum step length
    if PAR.STEPMAX > 0.:
        if alpha / cls.step_ratio > PAR.STEPMAX:
            alpha = PAR.STEPMAX * cls.step_ratio

    # write trial model
    savenpy('m_try', m + p * alpha)
    savetxt('alpha', alpha)

    cls.writer(cls.iter, 0., f_new)
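# loadnpy/savenpy read and write model vectors in NumPy binary format.
# Minimal sketches (the rename step compensates for np.save appending a
# '.npy' extension; details assumed, not taken from the source):

def loadnpy(filename):
    # read a vector stored in NumPy binary format
    return np.load(filename)

def savenpy(filename, v):
    # write a vector in NumPy binary format under the exact filename given
    np.save(filename, v)
    unix.mv(filename + '.npy', filename)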
def initialize_search(self):
    """ Determines initial step length for line search
    """
    unix.cd(PATH.OPTIMIZE)

    m = self.load('m_new')
    p = self.load('p_new')
    f = loadtxt('f_new')
    norm_m = max(abs(m))
    norm_p = max(abs(p))
    p_ratio = float(norm_m / norm_p)

    # reset search history
    self.search_history = [[0., f]]
    self.step_count = 0
    self.isdone = 0
    self.isbest = 0
    self.isbrak = 0

    # determine initial step length
    if self.iter == 1:
        alpha = p_ratio * PAR.STEPINIT
    elif self.restarted:
        alpha = p_ratio * PAR.STEPINIT
    elif PAR.SCHEME in ['LBFGS']:
        alpha = 1.
    else:
        alpha = self.initial_step()

    # optional ad hoc scaling
    if PAR.STEPOVERSHOOT:
        alpha *= PAR.STEPOVERSHOOT

    # optional maximum step length safeguard
    if PAR.STEPTHRESH:
        if alpha > p_ratio * PAR.STEPTHRESH and \
                self.iter > 1:
            alpha = p_ratio * PAR.STEPTHRESH

    # write trial model corresponding to chosen step length
    savetxt('alpha', alpha)
    self.save('m_try', m + alpha * p)

    # update log
    self.stepwriter(steplen=0., funcval=f)
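# Worked example (illustrative numbers): p_ratio rescales the search
# direction to the size of the model. If max|m| = 5800. and max|p| = 2.9,
# then p_ratio = 2000., and with PAR.STEPINIT = 0.05 the first trial step
# alpha = 100. perturbs the model's largest coefficient by
# alpha * max|p| = 290., i.e. 5 percent of max|m|.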
def update(self, ap):
    unix.cd(self.path)
    self.ilcg += 1

    x = self.load('LCG/x')
    r = self.load('LCG/r')
    y = self.load('LCG/y')
    p = self.load('LCG/p')
    ry = loadtxt('LCG/ry')

    pap = np.dot(p, ap)
    if pap < 0:
        print(' Stopping LCG [negative curvature]')
        isdone = True
        return isdone

    alpha = ry / pap
    x += alpha * p
    r += alpha * ap
    self.save('LCG/x', x)
    self.save('LCG/r', r)

    # check status
    if self.check_status(ap) == 0:
        isdone = True
    elif self.ilcg >= self.maxiter:
        isdone = True
    else:
        isdone = False

    if not isdone:
        y = self.apply_precond(r)
        ry_old = ry
        ry = np.dot(r, y)
        beta = ry / ry_old
        p = -y + beta * p
        self.save('LCG/y', y)
        self.save('LCG/p', p)
        savetxt('LCG/ry', np.dot(r, y))

    return isdone
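# check_status is defined elsewhere. A minimal residual-based stopping
# test consistent with its use above (an assumption; the actual criterion
# may use an Eisenstat-Walker forcing term): return 0 once the linear
# system residual is small relative to the gradient defining the Newton
# system.

def check_status(self, ap):
    r = self.load('LCG/r')
    g = self.load('g_new')
    if np.linalg.norm(r) < self.thresh * np.linalg.norm(g):
        return 0    # converged
    else:
        return -1   # keep iterating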