def test(self):
    """Run a short transverse walk and verify function-call bookkeeping."""
    coords = self.system.get_random_configuration()
    # random unit vector to walk transverse to
    direction = vec_random_ndim(coords.size)
    direction /= np.linalg.norm(direction)
    walker = _TransverseWalker(coords, self.pot, direction)
    result = walker.run(10)
    # the result must report exactly the calls the potential counted,
    # and at least one call must have happened
    self.assertEqual(result.nfev, self.pot.nfev)
    self.assertGreater(result.nfev, 0)
# Example #2
 def test(self):
     """Check that nfev reported by the walker matches the potential's count."""
     start = self.system.get_random_configuration()
     vec = vec_random_ndim(start.size)
     # normalize so the transverse direction is a unit vector
     vec /= np.linalg.norm(vec)
     walker = _TransverseWalker(start, self.pot, vec)
     res = walker.run(10)
     self.assertEqual(res.nfev, self.pot.nfev)
     self.assertGreater(res.nfev, 0)
 def test(self):
     """assert the motion is perpendicular to evec"""
     start = self.x.copy()
     walker = _TransverseWalker(self.x, self.pot, self.evec)
     res = walker.run(10)

     # net displacement after 10 steps should have (near) zero projection
     # onto the eigenvector
     displacement = res.coords - start
     self.assertLess(np.dot(displacement, self.evec), 1e-6)
# Example #4
    def test(self):
        """assert the motion is perpendicular to evec"""
        initial = self.x.copy()
        walker = _TransverseWalker(self.x, self.pot, self.evec)
        outcome = walker.run(10)

        # project the total move onto evec; it must be (near) zero
        move = outcome.coords - initial
        projection = np.dot(move, self.evec)
        self.assertLess(projection, 1e-6)
    def _minimizeTangentSpace(self, coords, energy=None, gradient=None):
        """minimize the energy in the space perpendicular to eigenvec.

        Parameters
        ----------
        coords : the current position
        energy, gradient : the energy and gradient at the current position

        Returns
        -------
        the result object returned by the transverse walker's run()
        """
        # gradient is needed to seed/update the walker without an extra
        # potential call
        assert gradient is not None
        # lazily build the walker on the first call, reuse it afterwards
        if self._transverse_walker is None:
            if self.invert_gradient:
                # note: if we pass transverse energy and gradient here we can save 1 potential call
                self._transverse_walker = _DimerTranslator(
                    coords, self.pot, self.eigenvec,
                    **self.tangent_space_quench_params)
            else:
                self._transverse_walker = _TransverseWalker(
                    coords, self.pot, self.eigenvec, energy, gradient,
                    **self.tangent_space_quench_params)
        else:
            # refresh the existing walker with the latest eigenvector and
            # position before running more steps
            self._transverse_walker.update_eigenvec(self.eigenvec,
                                                    self.eigenval)
            self._transverse_walker.update_coords(coords, energy, gradient)

        # determine the number of steps
        # i.e. if the eigenvector is deemed to have converged or is changing slowly
        eigenvec_converged = self.overlap > .999
        if eigenvec_converged:
            nstepsperp = self.nsteps_tangent2
        else:
            nstepsperp = self.nsteps_tangent1

        # reduce the maximum step size if necessary
        maxstep = self.maxstep_tangent
        if self.reduce_step > 0:
            maxstep *= self.step_factor**self.reduce_step
        self._transverse_walker.update_maxstep(maxstep)

        coords_old = coords.copy()
        ret = self._transverse_walker.run(nstepsperp)

        coords = ret.coords
        # record how far the tangent-space minimization moved us
        self.tangent_move_step = np.linalg.norm(coords - coords_old)
        self.tangent_result = ret
        if self.tangent_move_step > 1e-16:
            try:
                # cache the true (untransformed) energy and gradient at the
                # new position
                self.energy, self.gradient = self._transverse_walker.get_true_energy_gradient(
                    coords)
            except AttributeError:
                raise Exception(
                    "was tspot was never called? use the same gradient")
        return ret
    def _minimizeTangentSpace(self, coords, energy=None, gradient=None):
        """minimize the energy in the space perpendicular to eigenvec.

        Parameters
        ----------
        coords : the current position
        energy, gradient : the energy and gradient at the current position

        Returns
        -------
        the result object returned by the transverse walker's run()
        """
        # gradient is needed to seed/update the walker without an extra
        # potential call
        assert gradient is not None
        # lazily build the walker on the first call, reuse it afterwards
        if self._transverse_walker is None:
            if self.invert_gradient:
                # note: if we pass transverse energy and gradient here we can save 1 potential call
                self._transverse_walker = _DimerTranslator(coords, self.pot, self.eigenvec,
                                                           **self.tangent_space_quench_params)
            else:
                self._transverse_walker = _TransverseWalker(coords, self.pot, self.eigenvec, energy, gradient,
                                                            **self.tangent_space_quench_params)
        else:
            # refresh the existing walker with the latest eigenvector and
            # position before running more steps
            self._transverse_walker.update_eigenvec(self.eigenvec, self.eigenval)
            self._transverse_walker.update_coords(coords, energy, gradient)

        # determine the number of steps
        # i.e. if the eigenvector is deemed to have converged
        eigenvec_converged = self.overlap > .999
        nstepsperp = self.nsteps_tangent1
        if eigenvec_converged:
            nstepsperp = self.nsteps_tangent2

        # reduce the maximum step size if necessary
        maxstep = self.maxstep_tangent
        if self.reduce_step > 0:
            maxstep *= self.step_factor ** self.reduce_step
        self._transverse_walker.update_maxstep(maxstep)

        coords_old = coords.copy()
        ret = self._transverse_walker.run(nstepsperp)

        coords = ret.coords
        # record how far the tangent-space minimization moved us
        self.tangent_move_step = np.linalg.norm(coords - coords_old)
        self.tangent_result = ret
        if self.tangent_move_step > 1e-16:
            try:
                # cache the true (untransformed) energy and gradient at the
                # new position
                self.energy, self.gradient = self._transverse_walker.get_true_energy_gradient(coords)
            except AttributeError:
                # bug fix: `print "..."` was a Python 2 print statement and a
                # SyntaxError under Python 3; print() works on both.  Warn and
                # re-raise so the original AttributeError still propagates.
                print("was tspot was never called? use the same gradient")
                raise
        return ret