def attach_randomly_and_broadcast(atoms1, atoms2, distance,
                                  rng=np.random, comm=world):
    """Randomly attach two structures with a given minimal distance
    and ensure that these are distributed.

    Parameters
    ----------
    atoms1: Atoms object
    atoms2: Atoms object
    distance: float
        Required distance
    rng: random number generator object
        defaults to np.random.RandomState()
    comm: communicator to distribute
        Communicator to distribute the structure, default: world

    Returns
    -------
    Joined structure as an atoms object.
    """
    # Only the master rank builds the randomly attached structure; all
    # other ranks receive it through the broadcast, so every rank ends
    # up holding an identical Atoms object.
    if comm.rank != 0:
        return broadcast(None, 0, comm)
    structure = attach_randomly(atoms1, atoms2, distance, rng)
    broadcast(structure, 0, comm=comm)
    return structure
def new_generator(*args, **kwargs):
    """Run *generator* on the master rank and broadcast each yielded
    item to every rank; a final broadcast of ``None`` is the stop
    sentinel for the non-master ranks.
    """
    if world.rank == 0:
        for result in generator(*args, **kwargs):
            result = broadcast(result)
            yield result
        # Tell the other ranks that the generator is exhausted.
        broadcast(None)
    else:
        result = broadcast(None)
        while result is not None:
            yield result
            # Bug fix: fetch the next broadcast item.  The original
            # never updated ``result`` inside the loop, so non-master
            # ranks yielded the first item forever and fell out of
            # sync with the master's broadcasts.
            result = broadcast(None)
def new_method(*args, **kwargs):
    """Evaluate *method* on the master rank only and distribute its
    return value to all ranks via a broadcast."""
    outcome = method(*args, **kwargs) if world.rank == 0 else None
    return broadcast(outcome)
def new_method(*args, **kwargs):
    """Evaluate *method* on the master rank, broadcast the result, and
    if the master raised, re-raise the same exception on every rank so
    all ranks fail consistently.
    """
    ex = None
    result = None
    if world.rank == 0:
        try:
            result = method(*args, **kwargs)
        except Exception as error:
            # Bug fix: the original used ``except Exception as ex:
            # pass``.  Python 3 unbinds the ``as`` target when the
            # except block ends, so ``ex`` was undefined at the
            # ``broadcast((ex, result))`` call below and rank 0 died
            # with NameError instead of forwarding the exception.
            ex = error
    ex, result = broadcast((ex, result))
    if ex is not None:
        raise ex
    return result
def calculate(self, atoms=None, properties=['energy'],
              system_changes=all_changes):
    """Write the dftd3 input, run the external ``dftd3`` executable on
    the master rank only, and parse its output via ``read_results``.

    Raises RuntimeError if the dftd3 process exits with a nonzero
    status.  The error code is broadcast so every rank raises.
    """
    # We don't call FileIOCalculator.calculate here, because that method
    # calls subprocess.call(..., shell=True), which we don't want to do.
    # So, we reproduce some content from that method here.
    Calculator.calculate(self, atoms, properties, system_changes)

    # If a parameter file exists in the working directory, delete it
    # first. If we need that file, we'll recreate it later.
    localparfile = os.path.join(self.directory, '.dftd3par.local')
    if os.path.isfile(localparfile):
        os.remove(localparfile)

    # Write XYZ or POSCAR file and .dftd3par.local file if we are using
    # custom damping parameters.
    self.write_input(self.atoms, properties, system_changes)
    command = self._generate_command()

    # Finally, call dftd3 and parse results.
    # paropen gives non-master ranks a dummy file handle, so only the
    # master actually captures stdout.
    with paropen(self.label + '.out', 'w') as f:
        if world.rank == 0:
            # DFTD3 does not run in parallel
            # so we only need it to run on 1 core
            errorcode = subprocess.call(command,
                                        cwd=self.directory, stdout=f)
        else:
            errorcode = None
    world.barrier()  # Wait for the call() to complete on the master node
    # Distribute the exit status so every rank takes the same branch.
    errorcode = broadcast(errorcode, root=0)
    if errorcode:
        raise RuntimeError('%s returned an error: %d' %
                           (self.name, errorcode))

    self.read_results()
def read_results(self):
    """Parse energy (and, if requested, forces and stress) from the
    dftd3 output files.

    All file I/O happens on the master rank; each parsed quantity is
    then broadcast so ``self.results`` is identical on every rank.

    Raises RuntimeError if dftd3 reported an error or if no energy
    line could be found in the output.
    """
    # parse the energy
    outname = os.path.join(self.directory, self.label + '.out')
    self.results['energy'] = None
    self.results['free_energy'] = None
    if world.rank == 0:
        with open(outname, 'r') as f:
            for line in f:
                if line.startswith(' program stopped'):
                    if 'functional name unknown' in line:
                        message = 'Unknown DFTD3 functional name "{}". ' \
                                  'Please check the dftd3.f source file ' \
                                  'for the list of known functionals ' \
                                  'and their spelling.' \
                                  ''.format(self.parameters['xc'])
                    else:
                        message = 'dftd3 failed! Please check the {} ' \
                                  'output file and report any errors ' \
                                  'to the ASE developers.' \
                                  ''.format(outname)
                    raise RuntimeError(message)
                if line.startswith(' Edisp'):
                    # Line format (presumably fixed by dftd3): the 4th
                    # field is the dispersion energy in Hartree.
                    e_dftd3 = float(line.split()[3]) * Hartree
                    self.results['energy'] = e_dftd3
                    self.results['free_energy'] = e_dftd3
                    break
            else:
                # for/else: no break means no Edisp line was found.
                raise RuntimeError('Could not parse energy from dftd3 '
                                   'output, see file {}'.format(outname))
    self.results['energy'] = broadcast(self.results['energy'], root=0)
    self.results['free_energy'] = broadcast(self.results['free_energy'],
                                            root=0)

    # FIXME: Calculator.get_potential_energy() simply inspects
    # self.results for the free energy rather than calling
    # Calculator.get_property('free_energy'). For example, GPAW does
    # not actually present free_energy as an implemented property, even
    # though it does calculate it. So, we are going to add in the DFT
    # free energy to our own results if it is present in the attached
    # calculator. TODO: Fix the Calculator interface!!!
    if self.dft is not None:
        try:
            efree = self.dft.get_potential_energy(force_consistent=True)
            self.results['free_energy'] += efree
        except PropertyNotImplementedError:
            pass

    if self.parameters['grad']:
        # parse the forces
        forces = np.zeros((len(self.atoms), 3))
        forcename = os.path.join(self.directory, 'dftd3_gradient')
        self.results['forces'] = None
        if world.rank == 0:
            with open(forcename, 'r') as f:
                # One line per atom; presumably the file holds the
                # gradient, hence the sign flip to get forces below.
                for i, line in enumerate(f):
                    forces[i] = np.array([float(x) for x in line.split()])
            self.results['forces'] = -forces * Hartree / Bohr
        self.results['forces'] = broadcast(self.results['forces'], root=0)

        if any(self.atoms.pbc):
            # parse the stress tensor
            stress = np.zeros((3, 3))
            stressname = os.path.join(self.directory, 'dftd3_cellgradient')
            self.results['stress'] = None
            if world.rank == 0:
                with open(stressname, 'r') as f:
                    for i, line in enumerate(f):
                        for j, x in enumerate(line.split()):
                            stress[i, j] = float(x)
                # Convert cell gradient to stress, then store it in
                # Voigt order (xx, yy, zz, yz, xz, xy).
                stress *= Hartree / Bohr / self.atoms.get_volume()
                stress = np.dot(stress, self.atoms.cell.T)
                self.results['stress'] = stress.flat[[0, 4, 8, 5, 2, 1]]
            self.results['stress'] = broadcast(self.results['stress'],
                                               root=0)
def main():
    """Repeatedly build a random Al/Mg configuration, relax it with
    PreconLBFGS, and reuse the preconditioner parameters saved from the
    previous run (pickled to ``optimizer.pck``) to speed up subsequent
    relaxations.
    """
    atoms = build.bulk("Al")
    atoms = atoms * (2, 2, 2)
    print(len(atoms))

    nRuns = 10
    optimizerFname = "optimizer.pck"
    for i in range(nRuns):
        # Number of Mg substitutions for this run (at most half the
        # cell).  Integer division: randint requires an int bound.
        nMgAtoms = np.random.randint(0, len(atoms) // 2)

        # Insert Mg atoms (on the master rank only, so the random
        # structure is well defined before it is broadcast).
        system = cp.copy(atoms)
        if parallel.rank == 0:
            for j in range(nMgAtoms):
                # Bug fix: the original wrote ``system[i]`` (the outer
                # run index), repeatedly overwriting one atom so at
                # most a single Mg ever entered the cell.
                system[j].symbol = "Mg"

            # Shuffle the list
            for _ in range(10 * len(system)):
                first = np.random.randint(0, len(system))
                second = np.random.randint(0, len(system))
                system[first].symbol, system[second].symbol = \
                    system[second].symbol, system[first].symbol

        # Ensure every MPI rank works on the identical structure.
        system = parallel.broadcast(system)

        # Initialize the calculator
        calc = gp.GPAW(mode=gp.PW(400), xc="PBE", kpts=(4, 4, 4),
                       nbands=-10)
        system.set_calculator(calc)

        # Bug fix: record the structure being relaxed (``system``),
        # not the pristine template ``atoms``, in the trajectory.
        # NOTE(review): the "Resuse" filename typo is kept so existing
        # post-processing scripts keep working.
        traj = Trajectory("trajectoryResuse.traj", 'w', system)

        if i == 0:
            # First run: no saved preconditioner parameters yet.
            relaxer = PreconLBFGS(UnitCellFilter(system),
                                  logfile="resuse.log")
        else:
            # Reload the preconditioner parameters from the previous
            # run on the master rank and distribute them.
            relaxParams = None
            if parallel.rank == 0:
                with open(optimizerFname, 'rb') as infile:
                    relaxParams = pck.load(infile)
            relaxParams = parallel.broadcast(relaxParams)
            precon = Exp(r_cut=relaxParams["r_cut"],
                         r_NN=relaxParams["r_NN"],
                         mu=relaxParams["mu"],
                         mu_c=relaxParams["mu_c"])
            relaxer = PreconLBFGS(UnitCellFilter(system),
                                  logfile="resuse.log", precon=precon)

        relaxer.attach(traj)
        relaxer.run(fmax=0.05)
        print(relaxer.iteration)

        # Persist the optimized preconditioner parameters for reuse in
        # the next run.
        if parallel.rank == 0:
            with open(optimizerFname, 'wb') as outfile:
                relaxParams = {
                    "r_cut": relaxer.precon.r_cut,
                    "r_NN": relaxer.precon.r_NN,
                    "mu": relaxer.precon.mu,
                    "mu_c": relaxer.precon.mu_c,
                }
                pck.dump(relaxParams, outfile, pck.HIGHEST_PROTOCOL)
        parallel.barrier()