def test_saveloadtxt(self):
    """Round-trips a float through tools.savetxt/tools.loadtxt via a temp file."""
    tmp_file = NamedTemporaryFile(mode='wb', delete=False)
    tmp_file.close()
    try:
        x = 3.14159265359
        tools.savetxt(tmp_file.name, x)
        # 6 decimal places: savetxt is expected to store limited precision
        self.assertAlmostEqual(x, tools.loadtxt(tmp_file.name), 6)
    finally:
        # delete=False means nothing removes this file automatically;
        # the original leaked one temp file per test run
        os.remove(tmp_file.name)
def test_savetxt(self):
    """Checks that tools.savetxt writes a float readable from plain text."""
    filename = "tmp_savetxt"
    x = 3.14159265359
    tools.savetxt(filename, x)
    try:
        with open(filename, 'r') as f:
            y = float(f.readline())
    finally:
        # remove the scratch file even if reading/parsing raises;
        # the original left it behind on failure
        os.remove(filename)
    self.assertAlmostEqual(x, y, places=6)
def evaluate_gradient(cls):
    """Evaluates objective and gradient at the current model and stores both."""
    model = loadnpy('m_new')
    misfit = problem.func(model)
    gradient = problem.grad(model)
    savetxt('f_new', misfit)
    savenpy('g_new', gradient)
    # SRVM keeps internal state that must be refreshed after each new gradient
    if PAR.OPTIMIZE in ['SRVM']:
        optimize.update_SRVM()
def restart(self):
    """ Discards history of algorithm; prepares to start again from gradient
      direction
    """
    gradient = self.load('g_new')
    # fall back to the steepest-descent direction
    self.save('p_new', -gradient)
    savetxt('s_new', self.dot(gradient, gradient))
    self.restarted = 1
    # rewind the step-length log so the restarted iteration overwrites its row
    self.stepwriter.iter -= 1
    self.stepwriter.newline()
def finalize_search(self):
    """ Cleans working directory and writes updated model

    Rotates the *_new files to *_old, writes the model corresponding to the
    best trial step length, and appends line-search statistics to the output
    writers.
    """
    m = self.load('m_new')
    g = self.load('g_new')
    p = self.load('p_new')
    # NOTE: the original also read 's_new' into an unused local; that dead
    # file read has been removed (the file is still rotated to 's_old' below)
    x = self.step_lens()
    f = self.func_vals()

    # clean working directory
    unix.cd(PATH.OPTIMIZE)
    unix.rm('alpha')
    unix.rm('m_try')
    unix.rm('f_try')
    if self.iter > 1:
        for tag in ('m', 'f', 'g', 'p', 's'):
            unix.rm(tag + '_old')
    for tag in ('m', 'f', 'g', 'p', 's'):
        unix.mv(tag + '_new', tag + '_old')

    # write updated model at the best step length found
    alpha = x[f.argmin()]
    savetxt('alpha', alpha)
    self.save('m_new', m + alpha * p)
    savetxt('f_new', f.min())

    # append latest statistics
    self.writer('factor', -self.dot(g, g)**-0.5 * (f[1] - f[0]) / (x[1] - x[0]))
    self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
    self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
    self.writer('misfit', f[0])
    self.writer('restarted', self.restarted)
    self.writer('slope', (f[1] - f[0]) / (x[1] - x[0]))
    self.writer('step_count', self.step_count)
    # reuse alpha rather than recomputing x[f.argmin()]
    self.writer('step_length', alpha)
    self.writer('theta', 180. * np.pi**-1 * angle(p, -g))

    self.stepwriter.newline()
def initialize(self):
    """Sets up the LCG inner solve: zero solution, residual = current gradient."""
    unix.mkdir(self.path+'/'+'LCG')
    unix.cd(self.path)
    self.iter += 1
    self.ilcg = 0

    residual = self.load('g_new')
    solution = np.zeros(residual.size)
    self.save('LCG/x', solution)
    self.save('LCG/r', residual)

    # initial search direction is the preconditioned steepest descent
    precond_residual = self.apply_precond(residual)
    self.save('LCG/y', precond_residual)
    self.save('LCG/p', -precond_residual)
    savetxt('LCG/ry', np.dot(residual, precond_residual))
def initialize(self):
    """Begins a linear-CG solve with x0 = 0 and r0 = g_new."""
    unix.mkdir(self.path + '/' + 'LCG')
    unix.cd(self.path)
    self.iter += 1
    self.ilcg = 0

    r0 = self.load('g_new')
    self.save('LCG/x', np.zeros(r0.size))
    self.save('LCG/r', r0)

    # seed the search direction from the preconditioned residual
    y0 = self.apply_precond(r0)
    p0 = -y0
    self.save('LCG/y', y0)
    self.save('LCG/p', p0)
    savetxt('LCG/ry', np.dot(r0, y0))
def update(self, ap):
    """ Performs one LCG inner iteration.

    ap -- Hessian-vector product with the current search direction p.

    Returns True when the inner solve should stop (negative curvature,
    convergence per check_status, or iteration limit reached).
    """
    unix.cd(self.path)
    self.ilcg += 1

    x = self.load('LCG/x')
    r = self.load('LCG/r')
    y = self.load('LCG/y')
    p = self.load('LCG/p')
    ry = loadtxt('LCG/ry')

    pap = np.dot(p, ap)
    if pap < 0:
        # Hessian indefinite along p: abandon the step, keep current iterate
        print(' Stopping LCG [negative curvature]')
        isdone = True
        return isdone

    alpha = ry/pap
    x += alpha*p
    r += alpha*ap
    self.save('LCG/x', x)
    self.save('LCG/r', r)

    # check status
    if self.check_status(ap) == 0:
        isdone = True
    elif self.ilcg >= self.maxiter:
        isdone = True
    else:
        isdone = False

    if not isdone:
        y = self.apply_precond(r)
        ry_old = ry
        ry = np.dot(r, y)
        beta = ry/ry_old
        p = -y + beta*p
        self.save('LCG/y', y)
        self.save('LCG/p', p)
        # reuse ry -- the original recomputed np.dot(r, y) a second time here
        savetxt('LCG/ry', ry)

    return isdone
def update(self, ap):
    """ Performs one LCG inner iteration.

    ap -- Hessian-vector product with the current search direction p.

    Returns True when the inner solve should stop (negative curvature,
    convergence per check_status, or iteration limit reached).
    """
    unix.cd(self.path)
    self.ilcg += 1

    x = self.load('LCG/x')
    r = self.load('LCG/r')
    y = self.load('LCG/y')
    p = self.load('LCG/p')
    ry = loadtxt('LCG/ry')

    pap = np.dot(p, ap)
    if pap < 0:
        # Hessian indefinite along p: abandon the step, keep current iterate
        print(' Stopping LCG [negative curvature]')
        isdone = True
        return isdone

    alpha = ry / pap
    x += alpha * p
    r += alpha * ap
    self.save('LCG/x', x)
    self.save('LCG/r', r)

    # check status
    if self.check_status(ap) == 0:
        isdone = True
    elif self.ilcg >= self.maxiter:
        isdone = True
    else:
        isdone = False

    if not isdone:
        y = self.apply_precond(r)
        ry_old = ry
        ry = np.dot(r, y)
        beta = ry / ry_old
        p = -y + beta * p
        self.save('LCG/y', y)
        self.save('LCG/p', p)
        # reuse ry -- the original recomputed np.dot(r, y) a second time here
        savetxt('LCG/ry', ry)

    return isdone
def initialize_search(self):
    """ Determines initial step length for line search
    """
    model = self.load('m_new')
    direction = self.load('p_new')
    misfit = self.loadtxt('f_new')
    # step lengths are scaled by the ratio of model to direction magnitudes
    norm_m = max(abs(model))
    norm_p = max(abs(direction))
    p_ratio = float(norm_m / norm_p)

    # reset search history
    self.search_history = [[0., misfit]]
    self.step_count = 0
    self.isdone = 0
    self.isbest = 0
    self.isbrak = 0

    # determine initial step length
    if self.iter == 1 or self.restarted:
        alpha = p_ratio * PAR.STEPINIT
    elif PAR.OPTIMIZE in ['LBFGS']:
        alpha = 1.
    else:
        alpha = self.initial_step()

    # optional ad hoc scaling
    if PAR.STEPOVERSHOOT:
        alpha *= PAR.STEPOVERSHOOT

    # optional maximum step length safeguard
    if PAR.STEPTHRESH:
        if alpha > p_ratio * PAR.STEPTHRESH and self.iter > 1:
            alpha = p_ratio * PAR.STEPTHRESH

    # write trial model corresponding to chosen step length
    savetxt('alpha', alpha)
    self.save('m_try', model + alpha * direction)

    # append latest statistics
    self.stepwriter(steplen=0., funcval=misfit)
def __call__(self):
    """ Returns NLCG search direction
    """
    self.iter += 1
    savetxt(self.path + '/' + 'NLCG/iter', self.iter)

    unix.cd(self.path)
    grad = self.load('g_new')

    # first iteration: plain steepest descent
    if self.iter == 1:
        return -grad, 0

    # periodic restart once the cycle length is exceeded
    if self.iter > self.maxiter:
        print('restarting NLCG... [periodic restart]')
        self.restart()
        return -grad, 1

    # compute search direction
    grad_old = self.load('g_old')
    dir_old = self.load('p_old')

    if self.precond:
        beta = pollak_ribere(grad, grad_old, self.precond)
        direction = -self.precond(grad) + beta * dir_old
    else:
        beta = pollak_ribere(grad, grad_old)
        direction = -grad + beta * dir_old

    # check restart conditions
    if check_conjugacy(grad, grad_old) > self.thresh:
        print('restarting NLCG... [loss of conjugacy]')
        self.restart()
        return -grad, 1
    if check_descent(direction, grad) > 0.:
        print('restarting NLCG... [not a descent direction]')
        self.restart()
        return -grad, 1

    return direction, 0
def __call__(self):
    """ Returns NLCG search direction
    """
    self.iter += 1
    savetxt(self.path+'/'+'NLCG/iter', self.iter)

    unix.cd(self.path)
    g = self.load('g_new')

    # steepest descent on the very first iteration
    if self.iter == 1:
        return -g, 0

    # force a restart every maxiter iterations
    if self.iter > self.maxiter:
        print('restarting NLCG... [periodic restart]')
        self.restart()
        return -g, 1

    # compute search direction via the Polak-Ribiere update
    g_prev = self.load('g_old')
    p_prev = self.load('p_old')

    if self.precond:
        beta = pollak_ribere(g, g_prev, self.precond)
        p = -self.precond(g) + beta*p_prev
    else:
        beta = pollak_ribere(g, g_prev)
        p = -g + beta*p_prev

    # check restart conditions
    if check_conjugacy(g, g_prev) > self.thresh:
        print('restarting NLCG... [loss of conjugacy]')
        self.restart()
        return -g, 1
    if check_descent(p, g) > 0.:
        print('restarting NLCG... [not a descent direction]')
        self.restart()
        return -g, 1

    return p, 0
def evaluate_function(cls):
    """Evaluates the objective at the trial model and records the misfit."""
    trial = loadnpy('m_try')
    savetxt('f_try', problem.func(trial))
def savetxt(self, filename, c):
    """Writes scalar c to a text file inside the optimization directory."""
    fullpath = PATH.OPTIMIZE + '/' + filename
    savetxt(fullpath, c)
def evaluate_gradient(cls):
    """Evaluates objective and gradient at the current model and stores both."""
    model = loadnpy('m_new')
    misfit = problem.func(model)
    gradient = problem.grad(model)
    savetxt('f_new', misfit)
    savenpy('g_new', gradient)
def restart(self):
    """ Restarts algorithm
    """
    # reset the NLCG cycle counter and persist it for the next invocation
    self.iter = 1
    iterfile = self.path + '/' + 'NLCG/iter'
    savetxt(iterfile, self.iter)
def restart(self):
    """ Restarts algorithm
    """
    # restarting means beginning a fresh NLCG cycle; record it on disk
    self.iter = 1
    savetxt(self.path+'/'+'NLCG/iter', self.iter)
def evaluate_function(cls):
    """Evaluates the objective at the trial model and records the misfit."""
    trial_model = loadnpy('m_try')
    trial_misfit = problem.func(trial_model)
    savetxt('f_try', trial_misfit)
def evaluate_gradient(cls):
    """Evaluates objective and gradient at the current model; writes f_new
    and g_new for the optimizer."""
    current = loadnpy('m_new')
    objective = problem.func(current)
    gradient = problem.grad(current)
    savetxt('f_new', objective)
    savenpy('g_new', gradient)