Example #1
    def test_saveloadtxt(self):
        tmp_file = NamedTemporaryFile(mode='wb', delete=False)
        tmp_file.close()

        x = 3.14159265359
        tools.savetxt(tmp_file.name, x)
        self.assertAlmostEqual(x, tools.loadtxt(tmp_file.name), 6)
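
The round-trip test above pins down the contract of the two helpers: a scalar written with tools.savetxt must be recoverable by tools.loadtxt to six decimal places. A minimal sketch of a pair satisfying that contract (an assumption for illustration; the actual seisflows implementation may format its output differently):

    def savetxt(filename, v):
        # Write a single scalar as text; six significant digits is
        # enough to satisfy the test's assertAlmostEqual tolerance.
        with open(filename, 'w') as f:
            f.write('%.6e\n' % float(v))

    def loadtxt(filename):
        # Read the scalar back.
        with open(filename) as f:
            return float(f.read().strip())
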
Example #2
    def compute_direction(cls):
        """ Computes model update direction from stored function and gradient 
          values
        """
        unix.cd(cls.path)
        m_new = loadnpy('m_new')
        f_new = loadtxt('f_new')
        g_new = loadnpy('g_new')

        if PAR.SCHEME == 'GradientDescent':
            p_new = -g_new

        elif PAR.SCHEME == 'ConjugateGradient':
            # compute NLCG update
            p_new = cls.NLCG.compute()

        elif PAR.SCHEME == 'QuasiNewton':
            # compute L-BFGS update
            if cls.iter == 1:
                p_new = -g_new
            else:
                cls.LBFGS.update()
                p_new = -cls.LBFGS.solve()

        # save results
        unix.cd(cls.path)
        savenpy('p_new', p_new)
        savetxt('s_new', np.dot(g_new, p_new))
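
For every branch above, the scalar saved as 's_new' is the directional derivative np.dot(g_new, p_new) of the misfit along the update direction; a negative value confirms p_new is a descent direction. An illustrative standalone check (not part of the source):

    import numpy as np

    def is_descent_direction(g, p):
        # p is a descent direction iff the directional derivative
        # dot(g, p) is negative -- the quantity written to 's_new' above.
        return np.dot(g, p) < 0.
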
Example #3
    def finalize_search(cls):
        """ Cleans working directory and writes updated model
        """
        unix.cd(cls.path)
        m0 = loadnpy('m_new')
        p = loadnpy('p_new')

        x = cls.step_lens()
        f = cls.func_vals()

        # clean working directory
        unix.rm('alpha')
        unix.rm('m_try')
        unix.rm('f_try')

        if cls.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')

        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')
        unix.mv('s_new', 's_old')

        # write updated model
        alpha = x[f.argmin()]
        savetxt('alpha', alpha)
        savenpy('m_new', m0 + p * alpha)
        savetxt('f_new', f.min())

        cls.writer([], [], [])
Example #4
    def compute_step(cls):
        """ Computes next trial step length
        """
        unix.cd(cls.path)
        m0 = loadnpy('m_new')
        p = loadnpy('p_new')
        f0 = loadtxt('f_new')
        g0 = loadtxt('s_new')

        x = cls.step_lens()
        f = cls.func_vals()

        # compute trial step length
        if PAR.SRCHTYPE == 'Backtrack':
            alpha = lib.backtrack2(f0, g0, x[1], f[1], b1=0.1, b2=0.5)

        elif PAR.SRCHTYPE == 'Bracket':
            FACTOR = 2.
            if any(f[1:] < f[0]) and (f[-2] < f[-1]):
                alpha = lib.polyfit2(x, f)
            elif any(f[1:] < f[0]):
                alpha = loadtxt('alpha') * FACTOR
            else:
                alpha = loadtxt('alpha') * FACTOR**-1

        elif PAR.SRCHTYPE == 'Fixed':
            # 'step' was undefined in the original; assuming the line-search
            # step count, by analogy with the newer versions below
            alpha = cls.step_ratio * (cls.step_count + 1) * PAR.STEPLEN

        else:
            raise ValueError('unrecognized line search scheme: %s' % PAR.SRCHTYPE)

        # write trial model
        savetxt('alpha', alpha)
        savenpy('m_try', m0 + p * alpha)
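
The 'Bracket' branch depends on lib.polyfit2 returning the minimizer of a parabola fit through the trial points. A minimal sketch of such a helper (assumed behavior; the packaged version may add further safeguards):

    import numpy as np

    def polyfit2(x, f):
        # Fit f(a) ~ c2*a**2 + c1*a + c0 through the trial step lengths
        # and misfit values, then return the vertex of the parabola.
        i = np.argsort(x)
        c2, c1, c0 = np.polyfit(np.asarray(x)[i], np.asarray(f)[i], 2)
        if c2 <= 0:
            raise ValueError('quadratic fit is not convex')
        return -c1 / (2. * c2)
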
Example #5
    def test_savetxt(self):
        filename = "tmp_savetxt"
        x = 3.14159265359
        tools.savetxt(filename, x)
        with open(filename, 'r') as f:
            y = float(f.readline())
        os.remove(filename)
        self.assertAlmostEqual(x, y, places=6)
Example #6
    def initialize_search(cls):
        """ Determines initial step length for line search
        """
        unix.cd(cls.path)
        if cls.iter == 1:
            s_new = loadtxt('s_new')
            f_new = loadtxt('f_new')
        else:
            s_old = loadtxt('s_old')
            s_new = loadtxt('s_new')
            f_old = loadtxt('f_old')
            f_new = loadtxt('f_new')
            alpha = loadtxt('alpha')

        m = loadnpy('m_new')
        p = loadnpy('p_new')

        # reset search history
        cls.search_history = [[0., f_new]]
        cls.isdone = 0
        cls.isbest = 0
        cls.isbrak = 0

        # determine initial step length
        len_m = max(abs(m))
        len_d = max(abs(p))
        cls.step_ratio = float(len_m / len_d)

        if cls.iter == 1:
            assert PAR.STEPLEN != 0.
            alpha = PAR.STEPLEN * cls.step_ratio
        elif PAR.SRCHTYPE in ['Bracket']:
            alpha *= 2. * s_old / s_new
        elif PAR.SCHEME in ['GradientDescent', 'ConjugateGradient']:
            alpha *= 2. * s_old / s_new
        else:
            alpha = 1.

        # ad hoc scaling
        if PAR.ADHOCSCALING:
            alpha *= PAR.ADHOCSCALING

        # limit maximum step length
        if PAR.STEPMAX > 0.:
            if alpha / cls.step_ratio > PAR.STEPMAX:
                alpha = PAR.STEPMAX * cls.step_ratio

        # write trial model
        savenpy('m_try', m + p * alpha)
        savetxt('alpha', alpha)

        cls.writer(cls.iter, 0., f_new)
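
In the bracketing branches above, alpha *= 2. * s_old / s_new rescales the previous step by the ratio of directional derivatives s = dot(g, p); for a misfit that is locally quadratic along the search direction this carries over the previous effective curvature, and the factor of 2 appears intended to overshoot so the subsequent search can shrink the step. A hypothetical helper expressing the same rule:

    def scaled_initial_step(alpha_old, s_old, s_new):
        # Rescale the previously accepted step by the slope ratio; the
        # factor of 2 deliberately overshoots (an assumption about the
        # author's intent, not stated in the source).
        return 2. * alpha_old * s_old / s_new
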
Example #7
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        s = loadtxt('s_new')

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm('alpha')
        unix.rm('m_try')
        unix.rm('f_try')

        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')

        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')
        unix.mv('s_new', 's_old')

        # write updated model
        alpha = x[f.argmin()]
        savetxt('alpha', alpha)
        self.save('m_new', m + alpha * p)
        savetxt('f_new', f.min())

        # append latest statistics
        self.writer('factor',
                    -self.dot(g, g)**-0.5 * (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('step_count', self.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180. * np.pi**-1 * angle(p, -g))

        self.stepwriter.newline()
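
The 'theta' statistic converts angle(p, -g) from radians to degrees; the helper itself is not shown here, but a standard implementation looks like the following (a sketch, not necessarily the project's exact code):

    import numpy as np

    def angle(u, v):
        # Angle in radians between two vectors, with the cosine clipped
        # to [-1, 1] for numerical safety.
        cosang = np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v))
        return np.arccos(np.clip(cosang, -1., 1.))
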
Example #8
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load("m_new")
        g = self.load("g_new")
        p = self.load("p_new")
        s = loadtxt("s_new")

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm("alpha")
        unix.rm("m_try")
        unix.rm("f_try")

        if self.iter > 1:
            unix.rm("m_old")
            unix.rm("f_old")
            unix.rm("g_old")
            unix.rm("p_old")
            unix.rm("s_old")

        unix.mv("m_new", "m_old")
        unix.mv("f_new", "f_old")
        unix.mv("g_new", "g_old")
        unix.mv("p_new", "p_old")
        unix.mv("s_new", "s_old")

        # write updated model
        alpha = x[f.argmin()]
        savetxt("alpha", alpha)
        self.save("m_new", m + alpha * p)
        savetxt("f_new", f.min())

        # append latest output
        self.writer("factor", -self.dot(g, g) ** -0.5 * (f[1] - f[0]) / (x[1] - x[0]))
        self.writer("gradient_norm_L1", np.linalg.norm(g, 1))
        self.writer("gradient_norm_L2", np.linalg.norm(g, 2))
        self.writer("misfit", f[0])
        self.writer("restarted", self.restarted)
        self.writer("slope", (f[1] - f[0]) / (x[1] - x[0]))
        self.writer("step_count", self.step_count)
        self.writer("step_length", x[f.argmin()])
        self.writer("theta", 180.0 * np.pi ** -1 * angle(p, -g))

        self.stepwriter.newline()
Example #9
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        s = loadtxt('s_new')

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm('alpha')
        unix.rm('m_try')
        unix.rm('f_try')

        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')

        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')
        unix.mv('s_new', 's_old')

        # write updated model
        alpha = x[f.argmin()]
        savetxt('alpha', alpha)
        self.save('m_new', m + alpha*p)
        savetxt('f_new', f.min())

        # append latest statistics
        self.writer('factor', -self.dot(g,g)**-0.5 * (f[1]-f[0])/(x[1]-x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1]-f[0])/(x[1]-x[0]))
        self.writer('step_count', self.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180.*np.pi**-1*angle(p,-g))

        self.stepwriter.newline()
Example #10
    def initialize(self):
        unix.mkdir(self.path+'/'+'LCG')
        unix.cd(self.path)

        self.iter += 1
        self.ilcg = 0

        r = self.load('g_new')
        x = np.zeros(r.size)
        self.save('LCG/x', x)
        self.save('LCG/r', r)

        y = self.apply_precond(r)
        p = -y
        self.save('LCG/y', y)
        self.save('LCG/p', p)
        savetxt('LCG/ry', np.dot(r, y))
Example #11
    def initialize(self):
        unix.mkdir(self.path + '/' + 'LCG')
        unix.cd(self.path)

        self.iter += 1
        self.ilcg = 0

        r = self.load('g_new')
        x = np.zeros(r.size)
        self.save('LCG/x', x)
        self.save('LCG/r', r)

        y = self.apply_precond(r)
        p = -y
        self.save('LCG/y', y)
        self.save('LCG/p', p)
        savetxt('LCG/ry', np.dot(r, y))
Example #12
    def initialize_search(self):
        """ Determines initial step length for line search
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        p = self.load('p_new')
        f = loadtxt('f_new')
        norm_m = max(abs(m))
        norm_p = max(abs(p))
        p_ratio = float(norm_m / norm_p)

        # reset search history
        self.search_history = [[0., f]]
        self.step_count = 0
        self.isdone = 0
        self.isbest = 0
        self.isbrak = 0

        # determine initial step length
        if self.iter == 1:
            alpha = p_ratio * PAR.STEPINIT
        elif self.restarted:
            alpha = p_ratio * PAR.STEPINIT
        elif PAR.SCHEME in ['LBFGS']:
            alpha = 1.
        else:
            alpha = self.initial_step()

        # optional ad hoc scaling
        if PAR.STEPOVERSHOOT:
            alpha *= PAR.STEPOVERSHOOT

        # optional maximum step length safeguard
        if PAR.STEPTHRESH:
            if alpha > p_ratio * PAR.STEPTHRESH and \
                self.iter > 1:
                alpha = p_ratio * PAR.STEPTHRESH

        # write trial model corresponding to chosen step length
        savetxt('alpha', alpha)
        self.save('m_try', m + alpha * p)

        # update log
        self.stepwriter(steplen=0., funcval=f)
Example #13
    def initialize_search(self):
        """ Determines initial step length for line search
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        p = self.load('p_new')
        f = loadtxt('f_new')
        norm_m = max(abs(m))
        norm_p = max(abs(p))
        p_ratio = float(norm_m/norm_p)

        # reset search history
        self.search_history = [[0., f]]
        self.step_count = 0
        self.isdone = 0
        self.isbest = 0
        self.isbrak = 0

        # determine initial step length
        if self.iter == 1:
            alpha = p_ratio*PAR.STEPINIT
        elif self.restarted:
            alpha = p_ratio*PAR.STEPINIT
        elif PAR.SCHEME in ['LBFGS']:
            alpha = 1.
        else:
            alpha = self.initial_step()

        # optional ad hoc scaling
        if PAR.STEPOVERSHOOT:
            alpha *= PAR.STEPOVERSHOOT

        # optional maximum step length safeguard
        if PAR.STEPTHRESH:
            if alpha > p_ratio * PAR.STEPTHRESH and \
                self.iter > 1:
                alpha = p_ratio * PAR.STEPTHRESH

        # write trial model corresponding to chosen step length
        savetxt('alpha', alpha)
        self.save('m_try', m + alpha*p)

        # update log
        self.stepwriter(steplen=0., funcval=f)
Example #14
    def update(self, ap):
        unix.cd(self.path)

        self.ilcg += 1

        x = self.load('LCG/x')
        r = self.load('LCG/r')
        y = self.load('LCG/y')
        p = self.load('LCG/p')
        ry = loadtxt('LCG/ry')

        pap = np.dot(p, ap)
        if pap < 0:
            print(' Stopping LCG [negative curvature]')
            isdone = True
            return isdone

        alpha = ry / pap
        x += alpha * p
        r += alpha * ap
        self.save('LCG/x', x)
        self.save('LCG/r', r)

        # check status
        if self.check_status(ap) == 0:
            isdone = True
        elif self.ilcg >= self.maxiter:
            isdone = True
        else:
            isdone = False

        if not isdone:
            y = self.apply_precond(r)
            ry_old = ry
            ry = np.dot(r, y)
            beta = ry / ry_old
            p = -y + beta * p

            self.save('LCG/y', y)
            self.save('LCG/p', p)
            savetxt('LCG/ry', np.dot(r, y))

        return isdone
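
Together, initialize and update above implement a preconditioned linear conjugate-gradient (truncated-Newton) solve of the Newton system, with the solver state kept on disk under LCG/. A hypothetical driver showing how the two methods might be combined (every name outside the two methods is an illustrative assumption):

    def truncated_newton_direction(solver, hessian_times):
        # 'solver' is an instance of the class above; 'hessian_times'
        # stands in for however the caller obtains Hessian-vector products.
        solver.initialize()               # x = 0, r = g_new, p = -precond(r)
        while True:
            p = solver.load('LCG/p')      # current conjugate direction
            ap = hessian_times(p)         # H*p, supplied by the caller
            if solver.update(ap):         # True on convergence, negative
                break                     # curvature, or maxiter
        return solver.load('LCG/x')      # accumulated Newton-step estimate
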
Example #15
    def update(self, ap):
        unix.cd(self.path)

        self.ilcg += 1

        x = self.load('LCG/x')
        r = self.load('LCG/r')
        y = self.load('LCG/y')
        p = self.load('LCG/p')
        ry = loadtxt('LCG/ry')

        pap = np.dot(p, ap)
        if pap < 0:
            print(' Stopping LCG [negative curvature]')
            isdone = True
            return isdone
                       
        alpha = ry/pap
        x += alpha*p
        r += alpha*ap
        self.save('LCG/x', x)
        self.save('LCG/r', r)

        # check status
        if self.check_status(ap) == 0:
            isdone = True
        elif self.ilcg >= self.maxiter:
            isdone = True
        else:
            isdone = False

        if not isdone:
            y = self.apply_precond(r)
            ry_old = ry
            ry = np.dot(r, y)
            beta = ry/ry_old
            p = -y + beta*p

            self.save('LCG/y', y)
            self.save('LCG/p', p)
            savetxt('LCG/ry', np.dot(r, y))

        return isdone
Example #16
    def compute_step(self):
        """ Computes next trial step length
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        s = loadtxt('s_new')

        norm_m = max(abs(m))
        norm_p = max(abs(p))
        p_ratio = float(norm_m / norm_p)

        x = self.step_lens()
        f = self.func_vals()

        # compute trial step length
        if PAR.LINESEARCH == 'Fixed':
            alpha = p_ratio * (self.step_count + 1) * PAR.STEPINIT

        #elif PAR.LINESEARCH == 'Bracket' or \
        #    self.iter == 1 or self.restarted:
        elif PAR.LINESEARCH == 'Bracket':
            if any(f[1:] < f[0]) and (f[-2] < f[-1]):
                alpha = polyfit2(x, f)

            elif any(f[1:] <= f[0]):
                alpha = loadtxt('alpha') * PAR.STEPFACTOR**-1
            else:
                alpha = loadtxt('alpha') * PAR.STEPFACTOR

        elif PAR.LINESEARCH == 'Backtrack':
            # calculate slope along 1D profile
            slope = s / self.dot(g, g)**0.5
            if PAR.ADHOCFACTOR:
                slope *= PAR.ADHOCFACTOR

            alpha = backtrack2(f[0], slope, x[1], f[1], b1=0.1, b2=0.5)

        # write trial model corresponding to chosen step length
        savetxt('alpha', alpha)
        self.save('m_try', m + alpha * p)
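
The 'Backtrack' branch calls backtrack2, a quadratic-interpolation backtracking rule: fit the parabola matching f(0), f'(0), and the first trial value f(x1), take its minimizer, and safeguard the result to [b1*x1, b2*x1]. A sketch under that assumption (the packaged helper may differ in details):

    def backtrack2(f0, g0, x1, f1, b1=0.1, b2=0.5):
        # Minimizer of the quadratic q with q(0)=f0, q'(0)=g0, q(x1)=f1,
        # clamped so the new trial step stays within [b1*x1, b2*x1].
        x2 = -g0 * x1**2 / (2. * (f1 - f0 - g0 * x1))
        return min(max(x2, b1 * x1), b2 * x1)
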
Example #17
    def compute_step(self):
        """ Computes next trial step length
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        s = loadtxt('s_new')

        norm_m = max(abs(m))
        norm_p = max(abs(p))
        p_ratio = float(norm_m/norm_p)

        x = self.step_lens()
        f = self.func_vals()

        # compute trial step length
        if PAR.LINESEARCH == 'Fixed':
            alpha = p_ratio*(self.step_count + 1)*PAR.STEPINIT

        elif PAR.LINESEARCH == 'Bracket' or \
            self.iter==1 or self.restarted:
            if any(f[1:] < f[0]) and (f[-2] < f[-1]):
                alpha = polyfit2(x, f)

            elif any(f[1:] <= f[0]):
                alpha = loadtxt('alpha')*PAR.STEPFACTOR**-1
            else:
                alpha = loadtxt('alpha')*PAR.STEPFACTOR

        elif PAR.LINESEARCH == 'Backtrack':
            # calculate slope along 1D profile
            slope = s/self.dot(g,g)**0.5
            if PAR.ADHOCFACTOR:
                slope *= PAR.ADHOCFACTOR            

            alpha = backtrack2(f[0], slope, x[1], f[1], b1=0.1, b2=0.5)

        # write trial model corresponding to chosen step length
        savetxt('alpha', alpha)
        self.save('m_try', m + alpha*p)
Example #18
    def compute_direction(self):
        """ Computes model update direction from stored gradient
        """
        unix.cd(PATH.OPTIMIZE)

        g_new = self.load('g_new')

        if PAR.SCHEME in ['GradientDescent', 'SteepestDescent']:
            p_new, self.restarted = -g_new, False

        elif PAR.SCHEME in ['NLCG']:
            p_new, self.restarted = self.NLCG()

        elif PAR.SCHEME in ['LBFGS']:
            p_new, self.restarted = self.LBFGS()

        self.save('p_new', p_new)
        savetxt('s_new', self.dot(g_new, p_new))

        return p_new
Example #19
    def restart(self):
        """ Discards history of algorithm; prepares to start again from 
          gradient direction
        """
        unix.cd(PATH.OPTIMIZE)

        g = self.load('g_new')

        self.save('p_new', -g)
        savetxt('s_new', self.dot(g, g))

        if PAR.SCHEME in ['NLCG']:
            self.NLCG.restart()
        elif PAR.SCHEME in ['LBFGS']:
            self.LBFGS.restart()

        self.restarted = 1
        self.stepwriter.iter -= 1

        self.stepwriter.newline()
Example #20
    def restart(self):
        """ Discards history of algorithm; prepares to start again from 
          gradient direction
        """
        unix.cd(PATH.OPTIMIZE)

        g = self.load("g_new")

        self.save("p_new", -g)
        savetxt("s_new", self.dot(g, g))

        if PAR.SCHEME in ["NLCG"]:
            self.NLCG.restart()
        elif PAR.SCHEME in ["LBFGS"]:
            self.LBFGS.restart()

        self.restarted = 1
        self.stepwriter.iter -= 1

        self.stepwriter.newline()
Example #21
    def compute_direction(self):
        """ Computes model update direction from stored gradient
        """
        unix.cd(PATH.OPTIMIZE)

        g_new = self.load("g_new")

        if PAR.SCHEME in ["SD"]:
            p_new, self.restarted = -g_new, False

        elif PAR.SCHEME in ["NLCG"]:
            p_new, self.restarted = self.NLCG()

        elif PAR.SCHEME in ["LBFGS"]:
            p_new, self.restarted = self.LBFGS()

        self.save("p_new", p_new)
        savetxt("s_new", self.dot(g_new, p_new))

        return p_new
Example #22
    def __call__(self):
        """ Returns NLCG search direction
        """
        self.iter += 1
        savetxt(self.path+'/'+'NLCG/iter', self.iter)

        unix.cd(self.path)
        g_new = self.load('g_new')

        if self.iter == 1:
            return -g_new, 0

        elif self.iter > self.maxiter:
            print('restarting NLCG... [periodic restart]')
            self.restart()
            return -g_new, 1

        # compute search direction
        g_old = self.load('g_old')
        p_old = self.load('p_old')

        if self.precond:
            beta = pollak_ribere(g_new, g_old, self.precond)
            p_new = -self.precond(g_new) + beta*p_old
        else:
            beta = pollak_ribere(g_new, g_old)
            p_new = -g_new + beta*p_old

        # check restart conditions
        if check_conjugacy(g_new, g_old) > self.thresh:
            print('restarting NLCG... [loss of conjugacy]')
            self.restart()
            return -g_new, 1

        elif check_descent(p_new, g_new) > 0.:
            print('restarting NLCG... [not a descent direction]')
            self.restart()
            return -g_new, 1

        else:
            return p_new, 0
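
The coefficient computed by pollak_ribere above is the Polak-Ribiere update (the identifier's spelling follows the source). An unpreconditioned sketch, with the optional precond hook assumed to act inside the inner products:

    import numpy as np

    def pollak_ribere(g_new, g_old, precond=None):
        # Polak-Ribiere: beta = <P g_new, g_new - g_old> / <g_old, P g_old>,
        # where P is the preconditioner (identity if none is supplied).
        # How the source applies 'precond' here is an assumption.
        if precond is None:
            precond = lambda v: v
        return np.dot(precond(g_new), g_new - g_old) / np.dot(g_old, precond(g_old))
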
Example #23
    def evaluate_function(cls):
        m = loadnpy('m_try')
        f = problem.func(m)
        savetxt('f_try', f)
        print(f)
Example #24
    def restart(self):
        """ Restarts algorithm
        """
        self.iter = 1
        savetxt(self.path+'/'+'NLCG/iter', self.iter)
Example #25
    def evaluate_gradient(cls):
        m = loadnpy('m_new')
        f = problem.func(m)
        g = problem.grad(m)
        savetxt('f_new', f)
        savenpy('g_new', g)