Example #1
 def check_status(self, g, r):
     theta = 180. * np.pi**-1 * angle(g, r)
     if not 0. < theta < 90.:
         print('restarting LBFGS... [not a descent direction]')
         return 1
     elif theta > 90. - self.thresh:
         print('restarting LBFGS... [practical safeguard]')
         return 1
     else:
         return 0
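
Note: these check_status examples rely on an angle() helper that is not shown on this page; presumably it returns the angle, in radians, between two vectors, which the code above converts to degrees via the 180/pi factor. A minimal sketch under that assumption (the implementation below is inferred, not taken from the source):

    import numpy as np

    def angle(x, y):
        # Hypothetical sketch of the angle() helper assumed above: the angle,
        # in radians, between vectors x and y, from the normalized dot product.
        cos_theta = np.dot(x, y) / (np.linalg.norm(x) * np.linalg.norm(y))
        return np.arccos(np.clip(cos_theta, -1.0, 1.0))

With this definition, theta lies between 0 and 180 degrees after conversion, so a value of 90 degrees or more means the proposed direction is not a descent direction.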
Example #2
 def check_status(self, g, r):
     theta = 180.*np.pi**-1*angle(g,r)
     if not 0. < theta < 90.:
         print('restarting LBFGS... [not a descent direction]')
         return 1
     elif theta > 90. - self.thresh:
         print('restarting LBFGS... [practical safeguard]')
         return 1
     else:
         return 0
Example #3
 def check_status(self, g, r):
     theta = angle(g, r)
     if theta < 0.:
         print('restarting LBFGS... [not a descent direction]')
         return 1
     elif theta < self.thresh:
         print('restarting LBFGS... [practical safeguard]')
         return 1
     else:
         return 0
Example #4
 def check_status(self, g, r):
     theta = angle(g, r)
     if theta < 0.0:
         print("restarting LBFGS... [not a descent direction]")
         return 1
     elif theta < self.thresh:
         print("restarting LBFGS... [practical safeguard]")
         return 1
     else:
         return 0
Example #5
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        s = loadtxt('s_new')

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm('alpha')
        unix.rm('m_try')
        unix.rm('f_try')

        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')

        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')
        unix.mv('s_new', 's_old')

        # write updated model
        alpha = x[f.argmin()]
        savetxt('alpha', alpha)
        self.save('m_new', m + alpha * p)
        savetxt('f_new', f.min())

        # append latest statistics
        self.writer('factor',
                    -self.dot(g, g)**-0.5 * (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('step_count', self.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180. * np.pi**-1 * angle(p, -g))

        self.stepwriter.newline()
Example #6
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load("m_new")
        g = self.load("g_new")
        p = self.load("p_new")
        s = loadtxt("s_new")

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm("alpha")
        unix.rm("m_try")
        unix.rm("f_try")

        if self.iter > 1:
            unix.rm("m_old")
            unix.rm("f_old")
            unix.rm("g_old")
            unix.rm("p_old")
            unix.rm("s_old")

        unix.mv("m_new", "m_old")
        unix.mv("f_new", "f_old")
        unix.mv("g_new", "g_old")
        unix.mv("p_new", "p_old")
        unix.mv("s_new", "s_old")

        # write updated model
        alpha = x[f.argmin()]
        savetxt("alpha", alpha)
        self.save("m_new", m + alpha * p)
        savetxt("f_new", f.min())

        # append latest output
        self.writer("factor", -self.dot(g, g) ** -0.5 * (f[1] - f[0]) / (x[1] - x[0]))
        self.writer("gradient_norm_L1", np.linalg.norm(g, 1))
        self.writer("gradient_norm_L2", np.linalg.norm(g, 2))
        self.writer("misfit", f[0])
        self.writer("restarted", self.restarted)
        self.writer("slope", (f[1] - f[0]) / (x[1] - x[0]))
        self.writer("step_count", self.step_count)
        self.writer("step_length", x[f.argmin()])
        self.writer("theta", 180.0 * np.pi ** -1 * angle(p, -g))

        self.stepwriter.newline()
Example #7
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        s = loadtxt('s_new')

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm('alpha')
        unix.rm('m_try')
        unix.rm('f_try')

        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')

        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')
        unix.mv('s_new', 's_old')

        # write updated model
        alpha = x[f.argmin()]
        savetxt('alpha', alpha)
        self.save('m_new', m + alpha*p)
        savetxt('f_new', f.min())

        # append latest statistics
        self.writer('factor', -self.dot(g,g)**-0.5 * (f[1]-f[0])/(x[1]-x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1]-f[0])/(x[1]-x[0]))
        self.writer('step_count', self.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180.*np.pi**-1*angle(p,-g))

        self.stepwriter.newline()
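
The 'slope', 'factor', and 'step_length' statistics written by these finalize_search examples are simple quantities derived from the first two line-search trial points. A small worked sketch with made-up numbers (np.dot stands in for the class's self.dot; the arrays are purely illustrative):

    import numpy as np

    x = np.array([0.0, 0.1, 0.2])      # hypothetical trial step lengths
    f = np.array([1.00, 0.80, 0.95])   # hypothetical misfit values at those steps
    g = np.array([3.0, 4.0])           # hypothetical gradient vector

    slope = (f[1] - f[0]) / (x[1] - x[0])    # finite-difference slope along the search direction: -2.0
    factor = -np.dot(g, g)**-0.5 * slope     # slope scaled by the inverse gradient norm: 0.4
    step_length = x[f.argmin()]              # trial step with the lowest misfit: 0.1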
Example #8
    def retry_status(self):
        """ Returns false if search direction was the same as gradient
          direction; returns true otherwise
        """
        g = self.load('g_new')
        p = self.load('p_new')

        thresh = 1.e-3
        theta = angle(p, -g)

        if PAR.VERBOSE >= 2:
            print(' theta: %6.3f' % theta)

        if abs(theta) < thresh:
            return 0
        else:
            return 1
Example #9
    def retry_status(self):
        """ Returns false if search direction was the same as gradient
          direction. Returns true otherwise.
        """
        unix.cd(PATH.OPTIMIZE)

        g = self.load("g_new")
        p = self.load("p_new")

        thresh = 1.0e-3
        theta = angle(p, -g)
        # print ' theta: %6.3f' % theta

        if abs(theta) < thresh:
            return 0
        else:
            return 1
Example #10
    def retry_status(self):
        """ Returns false if search direction was the same as gradient
          direction. Returns true otherwise.
        """
        unix.cd(PATH.OPTIMIZE)

        g = self.load('g_new')
        p = self.load('p_new')

        thresh = 1.e-3
        theta = angle(p,-g)
        #print ' theta: %6.3f' % theta

        if abs(theta) < thresh:
            return 0
        else:
            return 1
Example #11
    def retry_status(self):
        """ Determines if restart is worthwhile

          After a failed line search, determines whether a restart is worthwhile
          by checking, in effect, whether the search direction was the same as
          the gradient direction
        """
        g = self.load('g_new')
        p = self.load('p_new')
        theta = angle(p, -g)

        if PAR.VERBOSE >= 2:
            print(' theta: %6.3f' % theta)

        thresh = 1.e-3
        if abs(theta) < thresh:
            return 0
        else:
            return 1
Example #12
    def retry_status(self):
        """ Determines if restart is worthwhile

          After a failed line search, determines whether a restart is worthwhile
          by checking, in effect, whether the search direction was the same as
          the gradient direction
        """
        g = self.load('g_new')
        p = self.load('p_new')
        theta = angle(p,-g)

        if PAR.VERBOSE >= 2:
            print(' theta: %6.3f' % theta)

        thresh = 1.e-3
        if abs(theta) < thresh:
            return 0
        else:
            return 1
Example #13
    def retry_status(self):
        """ Returns false if search direction was the same as gradient
          direction; returns true otherwise
        """
        unix.cd(PATH.OPTIMIZE)

        g = self.load('g_new')
        p = self.load('p_new')

        thresh = 1.e-3
        theta = angle(p,-g)

        if PAR.VERBOSE >= 2:
            print(' theta: %6.3f' % theta)

        if abs(theta) < thresh:
            return 0
        else:
            return 1
Example #14
    def finalize_search(self):
        """ Prepares algorithm machinery and scratch directory for next
          model update
        """
        m = self.load('m_new')
        print("finalize_search")
        print(m)
        g = self.load('g_new')
        p = self.load('p_new')
        x = self.line_search.search_history()[0]
        f = self.line_search.search_history()[1]

        # clean scratch directory
        unix.cd(PATH.OPTIMIZE)
        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')
        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')

        unix.mv('m_try', 'm_new')
        self.savetxt('f_new', f.min())

        # output latest statistics
        self.writer('factor',
                    -self.dot(g, g)**-0.5 * (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('step_count', self.line_search.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180. * np.pi**-1 * angle(p, -g))

        self.line_search.writer.newline()
Example #15
    def finalize_search(self):
        """ Prepares algorithm machinery and scratch directory for next
          model update
        """
        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        x = self.line_search.search_history()[0]
        f = self.line_search.search_history()[1]

        # clean scratch directory
        unix.cd(PATH.OPTIMIZE)
        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')
        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')

        unix.mv('m_try', 'm_new')
        self.savetxt('f_new', f.min())

        # output latest statistics
        self.writer('factor', -self.dot(g,g)**-0.5 * (f[1]-f[0])/(x[1]-x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1]-f[0])/(x[1]-x[0]))
        self.writer('step_count', self.line_search.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180.*np.pi**-1*angle(p,-g))

        self.line_search.writer.newline()
Example #16
    def retry_status(self):
        """ Returns false if search direction was the same as gradient
          direction; returns true otherwise

          Here and elsewhere we use the convention
              status > 0  : success
              status == 0 : not finished
              status < 0  : failed

        """
        g = self.load('g_new')
        p = self.load('p_new')

        thresh = 1.e-3
        theta = angle(p, -g)

        if PAR.VERBOSE >= 2:
            print(' theta: %6.3f' % theta)

        if abs(theta) < thresh:
            return 0
        else:
            return 1