def __init__(self, coords, potential, eigenvec0=None, rotational_steps=20,
             translational_steps=10, maxiter=500, leig_kwargs=None,
             translator_kwargs=None, dimer=True):
    """Prepare the dimer-method transition state search.

    Parameters
    ----------
    coords : array
        starting configuration (copied, the caller's array is not modified)
    potential :
        object providing the energy and gradient of the landscape
    eigenvec0 : array, optional
        initial guess for the lowest eigenvector; a random normalized
        vector of the same shape as coords is drawn if omitted
    rotational_steps, translational_steps, maxiter : int
        iteration limits for the rotation phase, translation phase, and
        the overall search
    leig_kwargs, translator_kwargs : dict, optional
        extra keyword arguments forwarded to the eigenvector finder and
        the translator, respectively
    dimer : bool
        if True translate with _DimerTranslator, otherwise with
        _HybridEigenvectorWalker
    """
    coords = coords.copy()
    self.rotational_steps = rotational_steps
    self.translational_steps = translational_steps
    self.maxiter = maxiter
    self.iter_number = 0

    # normalize the optional keyword dictionaries
    translator_kwargs = dict() if translator_kwargs is None else translator_kwargs
    leig_kwargs = dict() if leig_kwargs is None else leig_kwargs

    # draw a random unit vector as the eigenvector guess when none is given
    if eigenvec0 is None:
        eigenvec0 = rotations.vec_random_ndim(coords.shape)
        eigenvec0 /= np.linalg.norm(eigenvec0)
    assert coords.shape == eigenvec0.shape

    # maintains the orientation (rotation) of the dimer
    self.rotator = FindLowestEigenVector(coords, potential,
                                         eigenvec0=eigenvec0, **leig_kwargs)

    # moves (translates) the dimer through configuration space
    if dimer:
        self.translator = _DimerTranslator(coords, potential, eigenvec0,
                                           **translator_kwargs)
    else:
        self.translator = _HybridEigenvectorWalker(coords, potential,
                                                   eigenvec0,
                                                   **translator_kwargs)
def test(self):
    """The translator's reported nfev must match the potential's counter."""
    x = self.system.get_random_configuration()
    # random unit direction for the dimer axis
    evec = vec_random_ndim(x.size)
    evec /= np.linalg.norm(evec)
    translator = _DimerTranslator(x, self.pot, evec)
    ret = translator.run(10)
    # every function evaluation should be accounted for, and some must occur
    self.assertEqual(ret.nfev, self.pot.nfev)
    self.assertGreater(ret.nfev, 0)
def _minimizeTangentSpace(self, coords, energy=None, gradient=None):
    """minimize the energy in the space perpendicular to eigenvec.

    Parameters
    ----------
    coords : the current position
    energy, gradient : the energy and gradient at the current position
    """
    assert gradient is not None
    if self._transverse_walker is None:
        # lazily construct the walker on first use
        if self.invert_gradient:
            # note: if we pass transverse energy and gradient here we can save 1 potential call
            self._transverse_walker = _DimerTranslator(
                coords, self.pot, self.eigenvec,
                **self.tangent_space_quench_params)
        else:
            self._transverse_walker = _TransverseWalker(
                coords, self.pot, self.eigenvec, energy, gradient,
                **self.tangent_space_quench_params)
    else:
        self._transverse_walker.update_eigenvec(self.eigenvec, self.eigenval)
        self._transverse_walker.update_coords(coords, energy, gradient)

    # take more tangent-space steps once the eigenvector has converged
    # (i.e. its overlap with the previous one is very close to 1)
    nstepsperp = (self.nsteps_tangent2 if self.overlap > .999
                  else self.nsteps_tangent1)

    # shrink the maximum step size if earlier iterations requested a reduction
    maxstep = self.maxstep_tangent
    if self.reduce_step > 0:
        maxstep *= self.step_factor ** self.reduce_step
    self._transverse_walker.update_maxstep(maxstep)

    coords_old = coords.copy()
    ret = self._transverse_walker.run(nstepsperp)
    coords = ret.coords

    # record how far the tangent-space minimization actually moved us
    self.tangent_move_step = np.linalg.norm(coords - coords_old)
    self.tangent_result = ret
    if self.tangent_move_step > 1e-16:
        try:
            self.energy, self.gradient = \
                self._transverse_walker.get_true_energy_gradient(coords)
        except AttributeError:
            raise Exception(
                "was tspot was never called? use the same gradient")
    return ret
def _minimizeTangentSpace(self, coords, energy=None, gradient=None):
    """minimize the energy in the space perpendicular to eigenvec.

    Parameters
    ----------
    coords : the current position
    energy, gradient : the energy and gradient at the current position
    """
    assert gradient is not None
    if self._transverse_walker is None:
        if self.invert_gradient:
            # note: if we pass transverse energy and gradient here we can save 1 potential call
            self._transverse_walker = _DimerTranslator(
                coords, self.pot, self.eigenvec,
                # energy=transverse_energy, gradient=transverse_gradient,
                **self.tangent_space_quench_params)
        else:
            self._transverse_walker = _TransverseWalker(
                coords, self.pot, self.eigenvec, energy, gradient,
                **self.tangent_space_quench_params)
    else:
        self._transverse_walker.update_eigenvec(self.eigenvec, self.eigenval)
        self._transverse_walker.update_coords(coords, energy, gradient)

    # determine the number of steps
    # i.e. if the eigenvector is deemed to have converged
    eigenvec_converged = self.overlap > .999
    nstepsperp = self.nsteps_tangent1
    if eigenvec_converged:
        nstepsperp = self.nsteps_tangent2

    # reduce the maximum step size if necessary
    maxstep = self.maxstep_tangent
    if self.reduce_step > 0:
        maxstep *= (self.step_factor) ** self.reduce_step
    self._transverse_walker.update_maxstep(maxstep)

    coords_old = coords.copy()
    ret = self._transverse_walker.run(nstepsperp)
    coords = ret.coords
    self.tangent_move_step = np.linalg.norm(coords - coords_old)
    self.tangent_result = ret
    if self.tangent_move_step > 1e-16:
        try:
            self.energy, self.gradient = \
                self._transverse_walker.get_true_energy_gradient(coords)
        except AttributeError:
            # BUG FIX: the original used the Python 2 `print` statement,
            # which is a SyntaxError under Python 3.  Use the print
            # function and re-raise so callers still see AttributeError.
            print("was tspot was never called? use the same gradient")
            raise
    return ret