def print_status(self):
    """Log a summary of the population and generation counters.

    Also emits debug messages listing which active / next-generation
    members appeared or disappeared since the previous call, then caches
    the current sets for the next comparison.
    """
    pcm_log.info(' %s (tag: %s)' % (self.population.name, self.population.tag))
    pcm_log.info(' Current Generation : %4d' % self.current_generation)
    pcm_log.info(' Population (evaluated/total) : %4d /%4d' %
                 (len(self.population.evaluated), len(self.population.members)))
    pcm_log.info(' Actives (evaluated/total) : %4d /%4d' %
                 (len(self.population.actives_evaluated), len(self.population.actives)))
    pcm_log.info(' Size of Generation (this/next) : %4d /%4d\n' %
                 (len(self.get_generation()),
                  len(self.get_generation(self.current_generation + 1))))
    # A count mismatch means candidates moved between generations since the
    # last call; diff old vs. new sets to report exactly what changed.
    if len(self.get_generation(self.current_generation + 1)) + len(self.population.actives) != self.generation_size:
        pcm_log.debug('Change in generations')
        for i in self.old_actives:
            if i not in self.population.actives:
                pcm_log.debug('This active disappeared: %s' % str(i))
        for i in self.population.actives:
            if i not in self.old_actives:
                pcm_log.debug('This active appeared: %s' % str(i))
        for i in self.old_nextgen:
            if i not in self.get_generation(self.current_generation + 1):
                pcm_log.debug('This nextgen disappeared: %s' % str(i))
        for i in self.get_generation(self.current_generation + 1):
            if i not in self.old_nextgen:
                pcm_log.debug('This nextgen appeared: %s' % str(i))
    # Snapshot the current state for the next diff.
    self.old_actives = self.population.actives
    self.old_nextgen = self.get_generation(self.current_generation + 1)
def print_status(self):
    """Log population/generation counters and debug any membership changes.

    NOTE(review): this appears to duplicate another print_status in this
    file — consider consolidating; verify which copy callers use.
    """
    pcm_log.info(' %s (tag: %s)' % (self.population.name, self.population.tag))
    pcm_log.info(' Current Generation : %4d' % self.current_generation)
    pcm_log.info(' Population (evaluated/total) : %4d /%4d' %
                 (len(self.population.evaluated), len(self.population.members)))
    pcm_log.info(' Actives (evaluated/total) : %4d /%4d' %
                 (len(self.population.actives_evaluated), len(self.population.actives)))
    pcm_log.info(' Size of Generation (this/next) : %4d /%4d\n' %
                 (len(self.get_generation()),
                  len(self.get_generation(self.current_generation + 1))))
    # Detect generation-membership changes since the previous call.
    if len(self.get_generation(self.current_generation + 1)) + len(self.population.actives) != self.generation_size:
        pcm_log.debug('Change in generations')
        for i in self.old_actives:
            if i not in self.population.actives:
                pcm_log.debug('This active disappeared: %s' % str(i))
        for i in self.population.actives:
            if i not in self.old_actives:
                pcm_log.debug('This active appeared: %s' % str(i))
        for i in self.old_nextgen:
            if i not in self.get_generation(self.current_generation + 1):
                pcm_log.debug('This nextgen disappeared: %s' % str(i))
        for i in self.get_generation(self.current_generation + 1):
            if i not in self.old_nextgen:
                pcm_log.debug('This nextgen appeared: %s' % str(i))
    # Cache current sets so the next call can compute the diff.
    self.old_actives = self.population.actives
    self.old_nextgen = self.get_generation(self.current_generation + 1)
def run(self, nparal=1):
    """Drive repeated VASP runs until stage-7 convergence is reached.

    :param nparal: number of MPI processes for each VASP execution.

    Polls the running job every 30 s; each time a run finishes, its
    outputs are analysed and the inputs are updated for the next run.
    """
    self.started = True
    self.cleaner()
    vj = self.vaspjob
    ncalls = 1
    self.first_run(nparal)
    while True:
        if vj.runner is not None and vj.runner.poll() is not None:
            # A VASP run just finished: analyse it and decide whether to stop.
            pcm_log.info('Execution completed. Return code %d' % vj.runner.returncode)
            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                read_vasp_stdout(filename=filename)
            ncalls += 1
            va = VaspAnalyser(self.workdir)
            va.run()
            print('READING FORCES AND STRESS')
            max_force, max_stress = self.get_max_force_stress()
            if max_force is not None and max_stress is not None:
                pcm_log.debug('Max Force: %9.3E Stress: %9.3E' % (max_force, max_stress))
                vo = VaspOutput(self.workdir + os.sep + 'OUTCAR')
                info = vo.relaxation_info()
                pcm_log.debug('Avg Force: %9.3E Stress: %9.3E %9.3E' %
                              (info['avg_force'], info['avg_stress_diag'], info['avg_stress_non_diag']))
                # Converged only at the final stage (7) with all averaged
                # forces/stresses below the target threshold.
                if self.stage == 7 and info['avg_force'] < self.target_forces and \
                        info['avg_stress_diag'] < self.target_forces and \
                        info['avg_stress_non_diag'] < self.target_forces:
                    break
            else:
                print('Failure to get forces and stress')
            print('UPDATING INPUTS FOR NEXT RUN')
            self.update()
            vj.run(use_mpi=True, mpi_num_procs=nparal)
            if self.waiting:
                vj.runner.wait()
        else:
            # Still running: report SCF progress from the log and poll later.
            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                vasp_stdout = read_vasp_stdout(filename=filename)
                if len(vasp_stdout['iterations']) > 0:
                    pcm_log.debug('[%s] SCF: %s' %
                                  (os.path.basename(self.workdir), str(vasp_stdout['iterations'])))
            # if os.path.isfile(self.workdir + os.sep + 'OUTCAR'):
            #     vj.get_outputs()
            time.sleep(30)
def run(self, nparal=1):
    """Run VASP repeatedly until the stage-7 relaxation converges.

    :param nparal: number of MPI processes per VASP run.

    NOTE(review): this appears to duplicate another run() in this file —
    verify which copy is actually used before consolidating.
    """
    self.started = True
    self.cleaner()
    vj = self.vaspjob
    ncalls = 1
    self.first_run(nparal)
    while True:
        if vj.runner is not None and vj.runner.poll() is not None:
            # Run finished: parse stdout, analyse outputs, check convergence.
            pcm_log.info('Execution completed. Return code %d' % vj.runner.returncode)
            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                read_vasp_stdout(filename=filename)
            ncalls += 1
            va = VaspAnalyser(self.workdir)
            va.run()
            print('READING FORCES AND STRESS')
            max_force, max_stress = self.get_max_force_stress()
            if max_force is not None and max_stress is not None:
                pcm_log.debug('Max Force: %9.3E Stress: %9.3E' % (max_force, max_stress))
                vo = VaspOutput(self.workdir + os.sep + 'OUTCAR')
                info = vo.relaxation_info()
                pcm_log.debug('Avg Force: %9.3E Stress: %9.3E %9.3E' %
                              (info['avg_force'], info['avg_stress_diag'], info['avg_stress_non_diag']))
                # Stop only once the final stage satisfies all force/stress targets.
                if self.stage == 7 and info['avg_force'] < self.target_forces and \
                        info['avg_stress_diag'] < self.target_forces and \
                        info['avg_stress_non_diag'] < self.target_forces:
                    break
            else:
                print('Failure to get forces and stress')
            print('UPDATING INPUTS FOR NEXT RUN')
            self.update()
            vj.run(use_mpi=True, mpi_num_procs=nparal)
            if self.waiting:
                vj.runner.wait()
        else:
            # Still running: log SCF iterations and sleep before polling again.
            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                vasp_stdout = read_vasp_stdout(filename=filename)
                if len(vasp_stdout['iterations']) > 0:
                    pcm_log.debug('[%s] SCF: %s' %
                                  (os.path.basename(self.workdir), str(vasp_stdout['iterations'])))
            # if os.path.isfile(self.workdir + os.sep + 'OUTCAR'):
            #     vj.get_outputs()
            time.sleep(30)
def run_status(self):
    """Report whether the Fireball run has finished.

    With no runner attached, logs that the run is not finished and, if a
    'fireball.log' exists in the working directory, parses it.  With a
    runner, reports normal completion when its exit code is zero.
    """
    if self.runner is not None:
        if self.runner.poll() == 0:
            pcm_log.info('Fireball complete normally')
        return
    pcm_log.info('Fireball not finish')
    logpath = self.workdir + os.sep + 'fireball.log'
    if os.path.exists(logpath):
        read_fireball_stdout(filename=logpath)
def run(self):
    """Converge the total energy with respect to the k-point grid.

    Increases the k-point density (density = n**3, n += 2 per step) and
    runs a static DFTB+ calculation for each new grid, stopping when the
    last three energies agree within ``self.energy_tolerance``.
    """
    n = 10
    dftb = DFTBplus()
    kpoints = KPoints.optimized_grid(self.structure.lattice, kp_density=10000, force_odd=True)
    dftb.initialize(workdir=self.workdir, structure=self.structure, kpoints=kpoints)
    ans = dftb.set_slater_koster(search_paths=self.slater_path)
    if not ans:
        # Cannot run without the full set of Slater-Koster tables.
        print('Slater-Koster files not complete')
        return
    grid = None
    energies = []
    while True:
        density = n ** 3
        kpoints = KPoints.optimized_grid(self.structure.lattice, kp_density=density, force_odd=True)
        # Only launch a new calculation when the optimized grid actually
        # changed from the previous density.
        # NOTE(review): on the first pass grid is None, so np.sum(None) is
        # compared against the new grid sum — confirm this behaves as a
        # "always different" sentinel with the numpy version in use.
        if np.sum(grid) != np.sum(kpoints.grid):
            pcm_log.debug('Trial density: %d Grid: %s' % (density, kpoints.grid))
            grid = list(kpoints.grid)
            dftb.kpoints = kpoints
            dftb.basic_input()
            dftb.hamiltonian['MaxSCCIterations'] = 50
            # Restarting from previous charges accelerates SCC convergence.
            if os.path.isfile('charges.bin'):
                dftb.hamiltonian['ReadInitialCharges'] = True
            dftb.hamiltonian['Mixer'] = {'name': 'DIIS'}
            dftb.set_static()
            dftb.set_inputs()
            dftb.run()
            if self.waiting:
                dftb.runner.wait()
            # Poll until the DFTB+ process finishes, then parse detailed.out.
            while True:
                if dftb.runner is not None and dftb.runner.poll() is not None:
                    pcm_log.info('Execution completed. Return code %d' % dftb.runner.returncode)
                    filename = dftb.workdir + os.sep + 'detailed.out'
                    if os.path.exists(filename):
                        ret = read_detailed_out(filename)
                        line = 'KPoint_grid= %15s iSCC= %4d Total_energy= %10.4f SCC_error= %9.3E'
                        print(line % (grid, ret['SCC']['iSCC'], ret['total_energy'], ret['SCC']['SCC_error']))
                    else:
                        print('detailed.out could not be found, exiting...')
                        return
                    n += 2
                    energies.append(ret['total_energy'])
                    break
                time.sleep(10)
            self.results.append({'kp_grid': grid,
                                 'iSCC': ret['SCC']['iSCC'],
                                 'Total_energy': ret['total_energy'],
                                 'SCC_error': ret['SCC']['SCC_error']})
        else:
            # Same grid as before: just advance the trial density.
            n += 2
        # Converged when the spread of the last three energies is small enough.
        if len(energies) > 2 and abs(max(energies[-3:]) - min(energies[-3:])) < self.energy_tolerance:
            break
def run_one(self):
    """Perform one genetic-algorithm generation: elitism plus crossings.

    The best ``nelite`` candidates pass unchanged; subsequent candidates
    are replaced by children obtained from crossing, and candidates past
    the crossing range are replaced by the second child of each cross.
    """
    # Get a static selection of the values in the generation that are relaxed
    selection = self.population.ids_sorted(self.actives_in_generation)
    pcm_log.info(' Size of selection : %d' % len(selection))
    # Minus sign because we are searching for minima
    intensity = self.population.get_values(selection)
    for entry_id in intensity:
        intensity[entry_id] *= -1
    # Index of the first candidate that only receives the second child.
    discarded_index = self.nelite + int(sum(self.crossing_sets))
    # Elite candidates survive untouched.
    for i in range(min(self.nelite, len(selection))):
        entry_id = selection[i]
        pcm_log.debug('[%s] Promoted to new generation' % str(entry_id))
        self.pass_to_new_generation(entry_id, reason='Elite')
    jump = 0
    for i in range(min(len(self.crossing_sets), len(selection))):
        # crossing_sets[i] = number of partners crossed with selection[i].
        for j in range(self.crossing_sets[i]):
            entry_id = selection[i]
            if self.nelite + j + jump < len(selection):
                entry_jd = selection[self.nelite + j + jump]
                new_entry_id, new_entry_jd = self.population.cross([entry_id, entry_jd])
                pcm_log.info('Replace candidates %d and %d by crossing %d with %d' %
                             (self.nelite + j + jump, discarded_index, self.nelite + j + jump, i))
                pcm_log.debug('[%s] Moved to: %s' % (entry_id, new_entry_id))
                # First child replaces the crossing partner.
                self.replace_by_other(entry_jd, new_entry_id,
                                      reason='Cross between %s and %s' % (entry_id, entry_jd))
                self.population.enable(new_entry_id)
                if discarded_index < len(selection):
                    # Second child replaces a low-ranked (discarded) candidate.
                    entry_kd = selection[discarded_index]
                    pcm_log.debug('[%s] Moved to: %s' % (entry_kd, new_entry_jd))
                    self.replace_by_other(entry_kd, new_entry_jd,
                                          reason='Cross between %s and %s' % (entry_id, entry_jd))
                    self.population.enable(new_entry_jd)
                else:
                    pcm_log.debug('Candidate %s will not be activated' % new_entry_jd)
                discarded_index += 1
        jump += self.crossing_sets[i]
def run_status(self):
    """Report the state of the DFTB+ run.

    Without a runner, logs that the run is not finished and dumps the
    parsed contents of 'dftb_stdout.log' (when present) at debug level.
    With a runner, reports normal completion on a zero exit code.
    """
    if self.runner is not None:
        if self.runner.poll() == 0:
            pcm_log.info('DFTB+ complete normally')
        return
    pcm_log.info('DFTB+ not finish')
    logfile = self.workdir + os.sep + 'dftb_stdout.log'
    if os.path.exists(logfile):
        booleans, geom_optimization, stats = read_dftb_stdout(filename=logfile)
        for chunk in (booleans, geom_optimization, stats):
            pcm_log.debug(str(chunk))
def run(self):
    """Run static DFTB+ calculations trying several charge mixers.

    Each mixer in (Broyden, Anderson, DIIS, Simple) is tried in turn;
    the loop stops at the first mixer whose SCC cycle converges in fewer
    than ``MaxSCCIterations`` steps.  The outcome of the last attempted
    mixer is appended to ``self.results``.
    """
    dftb = DFTBplus(workdir=self.workdir)
    dftb.initialize(structure=self.structure, kpoints=self.kpoints)
    dftb.set_slater_koster(search_paths=self.slater_path)
    dftb.kpoints = self.kpoints
    # Start from scratch: stale charges would bias the mixer comparison.
    if os.path.isfile('charges.bin'):
        os.remove('charges.bin')
    mixer = None
    ret = None
    for mixer in ['Broyden', 'Anderson', 'DIIS', 'Simple']:
        dftb.basic_input()
        dftb.hamiltonian['MaxSCCIterations'] = self.MaxSCCIterations
        dftb.hamiltonian['Mixer'] = {'name': mixer}
        # Reuse charges from the previous mixer attempt when available.
        if os.path.isfile('charges.bin'):
            dftb.hamiltonian['ReadInitialCharges'] = True
        ret = None
        dftb.set_static()
        dftb.set_inputs()
        dftb.run()
        if self.waiting:
            dftb.runner.wait()
        # Poll until the DFTB+ process finishes, then parse its results.
        while True:
            if dftb.runner is not None and dftb.runner.poll() is not None:
                pcm_log.info('Execution completed. Return code %d' % dftb.runner.returncode)
                filename = dftb.workdir + os.sep + 'detailed.out'
                ret = read_detailed_out(filename)
                print('Mixer= %10s Total_energy= %9.3f iSCC= %4d SCC_error= %9.3E' %
                      (mixer, ret['total_energy'], ret['SCC']['iSCC'], ret['SCC']['SCC_error']))
                break
            time.sleep(10)
        # Converged before the iteration cap: this mixer is good enough.
        if ret['SCC']['iSCC'] < self.MaxSCCIterations:
            break
    if ret is not None:
        # BUGFIX: the original dict literal used adjacent string literals
        # ('Mixer' 'kp_grid'), which Python concatenates into a single
        # bogus 'Mixerkp_grid' key, silently dropping the mixer name.
        # Record the mixer explicitly under its own key.
        self.results.append({'Mixer': mixer,
                             'kp_grid': self.kpoints.grid,
                             'iSCC': ret['SCC']['iSCC'],
                             'Total_energy': ret['total_energy'],
                             'SCC_error': ret['SCC']['SCC_error']})
def correct_extras(self, changedb=False):
    """Reconcile the population actives with the lineage bookkeeping.

    :param changedb: when True, also delete orphaned entries from the
        'generations' collection in the database.

    Disables entries inconsistent with the lineage, re-enables lineage
    heads, then asserts that every generation has exactly
    ``generation_size`` candidates.
    """
    # Drop entries that lineage bookkeeping does not recognize.
    # NOTE(review): self.generation.pop() mutates while iterating
    # get_generation() — confirm get_generation() returns a fresh list.
    for entry_id in self.get_generation():
        if entry_id not in self.lineage_inv:
            print('Disabling one entry not in lineage_inv', entry_id)
            self.population.disable(entry_id)
            print(self.generation.pop(entry_id))
            if changedb:
                self.pcdb.db.generations.remove({'_id': entry_id})
        else:
            slot = self.lineage_inv[entry_id]
            # Only the newest entry of each lineage slot may stay active.
            if self.lineage[slot][-1] != entry_id:
                print('Disabling one entry not in lineage[slot][-1]', entry_id)
                self.population.disable(entry_id)
                print(self.generation.pop(entry_id))
                if changedb:
                    self.pcdb.db.generations.remove({'_id': entry_id})
    if self.current_generation > 0:
        # Ensure every lineage head is active and assigned to this generation.
        for slot in range(self.generation_size):
            entry_id = self.lineage[str(slot)][-1]
            if entry_id not in self.population.actives:
                print('Activating from lineage', entry_id)
                self.population.enable(entry_id)
            if entry_id not in self.get_generation():
                self.set_generation(entry_id, self.current_generation)
    # Make the actives set and the current generation agree both ways.
    actives = self.population.actives
    for entry_id in actives:
        if entry_id not in self.get_generation():
            print('Disabling ', entry_id)
            self.population.disable(entry_id)
    for entry_id in self.get_generation():
        if entry_id not in self.population.actives:
            print('Enabling', entry_id)
            self.population.enable(entry_id)
    candidates_per_generation = [len(self.get_generation(i))
                                 for i in range(self.current_generation + 1)]
    pcm_log.info('Candidates per generation: %s' % candidates_per_generation)
    pcm_log.info('Current generation: %d Candidates: %d' %
                 (self.current_generation, len(self.get_generation())))
    print('CANDIDATES:', candidates_per_generation)
    # Invariants: full current generation and uniform generation sizes.
    assert len(self.get_generation()) == self.generation_size
    assert min(candidates_per_generation) == max(candidates_per_generation)
def run(self):
    """Run static DFTB+ calculations trying several charge mixers.

    Tries Broyden, Anderson, DIIS and Simple mixers in turn, stopping at
    the first one whose SCC cycle converges in fewer than
    ``MaxSCCIterations`` steps.  The outcome of the last attempted mixer
    is appended to ``self.results``.
    """
    dftb = DFTBplus()
    dftb.initialize(workdir=self.workdir, structure=self.structure, kpoints=self.kpoints)
    dftb.set_slater_koster(search_paths=self.slater_path)
    dftb.kpoints = self.kpoints
    # Remove stale charges so the first mixer starts from scratch.
    if os.path.isfile('charges.bin'):
        os.remove('charges.bin')
    mixer = None
    ret = None
    for mixer in ['Broyden', 'Anderson', 'DIIS', 'Simple']:
        dftb.basic_input()
        dftb.hamiltonian['MaxSCCIterations'] = self.MaxSCCIterations
        dftb.hamiltonian['Mixer'] = {'name': mixer}
        # Reuse charges left by the previous attempt when present.
        if os.path.isfile('charges.bin'):
            dftb.hamiltonian['ReadInitialCharges'] = True
        ret = None
        dftb.set_static()
        dftb.set_inputs()
        dftb.run()
        if self.waiting:
            dftb.runner.wait()
        # Poll until the DFTB+ process finishes, then parse detailed.out.
        while True:
            if dftb.runner is not None and dftb.runner.poll() is not None:
                pcm_log.info('Execution completed. Return code %d' % dftb.runner.returncode)
                filename = dftb.workdir + os.sep + 'detailed.out'
                ret = read_detailed_out(filename)
                print('Mixer= %10s Total_energy= %9.3f iSCC= %4d SCC_error= %9.3E' %
                      (mixer, ret['total_energy'], ret['SCC']['iSCC'], ret['SCC']['SCC_error']))
                break
            time.sleep(10)
        # SCC converged below the cap: keep this mixer.
        if ret['SCC']['iSCC'] < self.MaxSCCIterations:
            break
    if ret is not None:
        # BUGFIX: the original dict used adjacent string literals
        # ('Mixer' 'kp_grid'), producing a single bogus 'Mixerkp_grid' key
        # and losing the mixer name; store the mixer under its own key.
        self.results.append({'Mixer': mixer,
                             'kp_grid': self.kpoints.grid,
                             'iSCC': ret['SCC']['iSCC'],
                             'Total_energy': ret['total_energy'],
                             'SCC_error': ret['SCC']['SCC_error']})
def run_one(self):
    """Perform one GA generation: promote elites, then replace by crossings.

    NOTE(review): appears to duplicate another run_one in this file —
    verify which copy is in use before consolidating.
    """
    # Get a static selection of the values in the generation that are relaxed
    selection = self.population.ids_sorted(self.actives_in_generation)
    pcm_log.info(' Size of selection : %d' % len(selection))
    # Minus sign because we are searching for minima
    intensity = self.population.get_values(selection)
    for entry_id in intensity:
        intensity[entry_id] *= -1
    # First index of the candidates that only receive second children.
    discarded_index = self.nelite + int(sum(self.crossing_sets))
    # Elites pass to the new generation unchanged.
    for i in range(min(self.nelite, len(selection))):
        entry_id = selection[i]
        pcm_log.debug('[%s] Promoted to new generation' % str(entry_id))
        self.pass_to_new_generation(entry_id, reason='Elite')
    jump = 0
    for i in range(min(len(self.crossing_sets), len(selection))):
        for j in range(self.crossing_sets[i]):
            entry_id = selection[i]
            if self.nelite + j + jump < len(selection):
                entry_jd = selection[self.nelite + j + jump]
                new_entry_id, new_entry_jd = self.population.cross([entry_id, entry_jd])
                pcm_log.info('Replace candidates %d and %d by crossing %d with %d' %
                             (self.nelite + j + jump, discarded_index, self.nelite + j + jump, i))
                pcm_log.debug('[%s] Moved to: %s' % (entry_id, new_entry_id))
                # First child replaces the crossing partner.
                self.replace_by_other(entry_jd, new_entry_id,
                                      reason='Cross between %s and %s' % (entry_id, entry_jd))
                self.population.enable(new_entry_id)
                if discarded_index < len(selection):
                    # Second child replaces a discarded low-ranked candidate.
                    entry_kd = selection[discarded_index]
                    pcm_log.debug('[%s] Moved to: %s' % (entry_kd, new_entry_jd))
                    self.replace_by_other(entry_kd, new_entry_jd,
                                          reason='Cross between %s and %s' % (entry_id, entry_jd))
                    self.population.enable(new_entry_jd)
                else:
                    pcm_log.debug('Candidate %s will not be activated' % new_entry_jd)
                discarded_index += 1
        jump += self.crossing_sets[i]
def correct_extras(self, changedb=False):
    """Reconcile actives with the lineage and enforce generation invariants.

    :param changedb: when True, orphaned entries are also removed from the
        database 'generations' collection.

    NOTE(review): appears to duplicate another correct_extras in this
    file — verify which copy is in use.
    """
    # Disable entries that lineage bookkeeping does not recognize.
    for entry_id in self.get_generation():
        if entry_id not in self.lineage_inv:
            print('Disabling one entry not in lineage_inv', entry_id)
            self.population.disable(entry_id)
            print(self.generation.pop(entry_id))
            if changedb:
                self.pcdb.db.generations.remove({'_id': entry_id})
        else:
            slot = self.lineage_inv[entry_id]
            # Only the newest entry of a lineage slot is allowed to remain.
            if self.lineage[slot][-1] != entry_id:
                print('Disabling one entry not in lineage[slot][-1]', entry_id)
                self.population.disable(entry_id)
                print(self.generation.pop(entry_id))
                if changedb:
                    self.pcdb.db.generations.remove({'_id': entry_id})
    if self.current_generation > 0:
        # Re-enable lineage heads and attach them to the current generation.
        for slot in range(self.generation_size):
            entry_id = self.lineage[str(slot)][-1]
            if entry_id not in self.population.actives:
                print('Activating from lineage', entry_id)
                self.population.enable(entry_id)
            if entry_id not in self.get_generation():
                self.set_generation(entry_id, self.current_generation)
    # Synchronize actives and current generation membership both ways.
    actives = self.population.actives
    for entry_id in actives:
        if entry_id not in self.get_generation():
            print('Disabling ', entry_id)
            self.population.disable(entry_id)
    for entry_id in self.get_generation():
        if entry_id not in self.population.actives:
            print('Enabling', entry_id)
            self.population.enable(entry_id)
    candidates_per_generation = [len(self.get_generation(i))
                                 for i in range(self.current_generation + 1)]
    pcm_log.info('Candidates per generation: %s' % candidates_per_generation)
    pcm_log.info('Current generation: %d Candidates: %d' %
                 (self.current_generation, len(self.get_generation())))
    print('CANDIDATES:', candidates_per_generation)
    # Invariants: full current generation and uniform generation sizes.
    assert len(self.get_generation()) == self.generation_size
    assert min(candidates_per_generation) == max(candidates_per_generation)
def run_one(self):
    """Move every candidate toward the current best, with a random kick.

    The selection is sorted best-first; the first entry stays in place,
    every other entry is moved toward it and then perturbed randomly with
    a factor that decays geometrically with the generation number.
    """
    # Get a static selection of the values in the generation that are relaxed
    selection = self.population.ids_sorted(self.actives_in_generation)
    pcm_log.info('Size of selection : %d' % len(selection))
    # Minus sign because we are searching for minima
    intensity = self.population.get_values(selection)
    for entry_id in intensity:
        intensity[entry_id] *= -1
    moves = {}
    new_selection = {}
    for entry_id in selection:
        new_selection[entry_id] = None
        moves[entry_id] = 0
    # Move all the particles (Except the elite)
    # as the selection is sorted it means that the first one will no move
    pcm_log.debug('No Moving %d %s. Intensity: %7.3f' % (0, str(selection[0]), intensity[selection[0]]))
    for i in range(1, len(selection)):
        entry_id = selection[i]
        pcm_log.debug('Moving %d %s. Intensity: %7.3f' % (i, str(entry_id), intensity[entry_id]))
        # Attraction toward the best candidate, then a decaying random kick.
        new_selection[entry_id] = self.population.move(entry_id, selection[0],
                                                       factor=self.beta0, in_place=False)
        factor = self.alpha0 * (self.delta ** self.current_generation)
        self.population.move_random(new_selection[entry_id], factor=factor, in_place=True)
    # NOTE(review): moves[entry_id] is initialized to 0 but never
    # incremented, so the "%d moves" messages always report 0 — confirm
    # whether the counter was meant to be updated above.
    for entry_id in selection:
        if new_selection[entry_id] is not None:
            pcm_log.debug('[%s] Moved to: %s (%d moves)' %
                          (str(entry_id), new_selection[entry_id], moves[entry_id]))
            self.replace_by_other(entry_id, new_selection[entry_id],
                                  reason='Moved %d times' % moves[entry_id])
            self.population.enable(new_selection[entry_id])
        else:
            pcm_log.debug('[%s] Promoted to new generation' % str(entry_id))
            self.pass_to_new_generation(entry_id, reason='The best')
def run(self):
    """
    Execute the total number of cycles.

    Main search loop: each iteration enforces the generation size, waits
    until the whole population is evaluated, checks stopping criteria
    (stabilization of the best candidate or reaching ``target_value``),
    removes failures and duplicates, then runs one searcher cycle.

    :return:
    """
    print(str(self))
    print(str(self.population))
    self.save_info()
    self.population.save_info()
    best_member = ''
    best_recorded = None
    survival_for_best = 0
    while True:
        print('\nGENERATION: %d' % self.current_generation)
        self.print_status(level='DEBUG')
        pcm_log.debug('[%s] Enforcing the size of generation: %d' %
                      (self.searcher_name, self.generation_size))
        self.enforce_generation_size()
        self.update_lineages()
        # Snapshot for the change-detection done inside print_status().
        self.old_actives = self.population.actives
        self.old_nextgen = self.get_generation(self.current_generation + 1)
        self.print_status()
        # Block until every active candidate has been evaluated.
        number_evaluated = len(self.population.actives_evaluated)
        while self.population.fraction_evaluated < 1.0:
            if len(self.population.actives_evaluated) != number_evaluated:
                pcm_log.debug("Population '%s' still not evaluated. %4.0f %%" %
                              (self.population.name, 100 * self.population.fraction_evaluated))
                self.print_status(level='DEBUG')
                number_evaluated = len(self.population.actives_evaluated)
            self.population.replace_failed()
            time.sleep(self.sleep_time)
        pcm_log.debug("Population '%s' evaluated. %4.0f %%" %
                      (self.population.name, 100 * self.population.fraction_evaluated))
        best_member = self.population.best_candidate
        self.population.refine_progressive(best_member)
        print('Current best candidate: [%s] %s' % (best_member, self.population.str_entry(best_member)))
        if best_member in self.get_generation():
            # Stop when the best candidate survived enough generations.
            print('This candidate have survived for %d generations' % len(self.generation[best_member]))
            if len(self.generation[best_member]) >= self.stabilization_limit:
                self.save_generations()
                break
        else:
            pcm_log.debug('Best candidate %s is not in the current generation' % best_member)
            # pcm_log.debug('Slot: %s' % self.lineage_inv[best_member])
            # pcm_log.debug('Lineage: %s' % self.lineage[self.lineage_inv[best_member]])
            # Track how many consecutive cycles this best candidate persists.
            if best_member != best_recorded:
                survival_for_best = 0
                best_recorded = best_member
            else:
                survival_for_best += 1
            if survival_for_best >= self.stabilization_limit:
                self.save_generations()
                break
        # Optional early stop when the target value is reached.
        if self.target_value is not None:
            if self.population.value(best_member) <= self.target_value:
                print('Target value achieved: target=%9.3f best=%9.3f' %
                      (self.population.value(best_member), self.target_value))
                self.save_generations()
                break
            else:
                print('Best value = %7.3f target value = %7.3f' %
                      (self.population.value(best_member), self.target_value))
        # Candidates that never got evaluated are replaced by random ones.
        pcm_log.debug('[%s] Removing not evaluated: %d' %
                      (self.searcher_name, len(self.population.actives_no_evaluated)))
        for entry_id in self.population.actives_no_evaluated:
            self.replace_by_random(entry_id, reason='no evaluated')
        self.print_status()
        # Duplicates are recorded as changes and replaced by random entries.
        duplicates = self.population.check_duplicates(
            self.population.ids_sorted(self.population.actives_evaluated))
        for entry_id in duplicates:
            change = {'change': 'duplicate', 'to': duplicates[entry_id], 'reason': None}
            self.write_change(entry_id, change)
            self.replace_by_random(entry_id, reason='duplicate')
        pcm_log.info(' Duplicates identified and disabled: %d' % len(duplicates))
        self.print_status(level='INFO')
        pcm_log.info(' Running one cycle for %s with %d candidates' %
                     (self.searcher_name, len(self.actives_in_generation)))
        self.run_one()
        self.update_generation()
    print('Searcher ended after %d iterations' % self.current_generation)
    print('Best candidate: [%s] %s' % (best_member, self.population.str_entry(best_member)))
def run_one(self):
    """Perform one firefly-algorithm cycle.

    Each candidate (except the brightest) is attracted toward brighter
    ones with attractiveness beta = beta0 * exp(-gamma * r^2); either
    toward all brighter candidates (``multi_move``) or only the closest
    one.  A random perturbation decaying with the generation is added.
    """
    # Get a static selection of the values in the generation that are relaxed
    selection = self.population.ids_sorted(self.actives_in_generation)
    pcm_log.info(' Size of selection : %d' % len(selection))
    # For statistical purposes
    distances = []
    intensities = []
    atractiveness = []
    # Minus sign because we are searching for minima
    intensity = self.population.get_values(selection)
    for entry_id in intensity:
        intensity[entry_id] *= -1
    moves = {}
    new_selection = {}
    for entry_id in selection:
        new_selection[entry_id] = None
        moves[entry_id] = 0
    # Move all the fireflies (Except the most brightness)
    # as the selection is sorted it means that the first one will no move
    pcm_log.debug('No Moving %d %s. Intensity: %7.3f' % (0, str(selection[0]), intensity[selection[0]]))
    for i in range(1, len(selection)):
        entry_id = selection[i]
        pcm_log.debug('Moving %d %s. Intensity: %7.3f' % (i, str(entry_id), intensity[entry_id]))
        # Moving in the direction of all the brighter fireflies
        if self.multi_move:
            for j in list(range(0, i))[::-1]:
                entry_jd = selection[j]
                distance = self.population.distance(entry_id, entry_jd)
                beta = self.beta0 * math.exp(-self.gamma * distance * distance)
                # The variation of attractiveness \beta with the distance r
                pcm_log.debug('[%s] Distance: %7.3f. Intensity: %7.3f. Atractiveness: %7.3f' %
                              (str(entry_jd), distance, intensity[entry_jd], beta))
                # Collecting Statistics
                distances.append(distance)
                intensities.append(intensity[entry_jd])
                atractiveness.append(beta)
                if new_selection[entry_id] is None:
                    # First move creates a new (out-of-place) candidate.
                    new_selection[entry_id] = self.population.move(entry_id, entry_jd,
                                                                   factor=beta, in_place=False)
                    if self.alpha0 > 0:
                        factor = self.alpha0 * (self.delta ** self.current_generation)
                        self.population.move_random(new_selection[entry_id], factor=factor, in_place=True)
                else:
                    # Subsequent moves update the new candidate in place.
                    self.population.move(new_selection[entry_id], entry_jd, factor=beta, in_place=True)
                    if self.alpha0 > 0:
                        factor = self.alpha0 * (self.delta ** self.current_generation)
                        self.population.move_random(new_selection[entry_id], factor=factor, in_place=True)
                # print(new_selection)
                moves[entry_id] += 1
        # Moving in the direction of the closets brighter firefly
        else:
            # NOTE(review): this branch rebinds the shared 'distances' list
            # (clobbering the statistics) and logs 'entry_jd', which is not
            # assigned in this branch — confirm intended behavior.
            distances = [self.population.distance(entry_id, entry_jd) for entry_jd in selection[:i]]
            distance = min(distances)
            target = selection[distances.index(distance)]
            beta = self.beta0 * math.exp(-self.gamma * distance * distance)
            # The variation of attractiveness \beta with the distance r
            pcm_log.debug('[%s] Distance: %7.3f. Intensity: %7.3f. Atractiveness: %7.3f' %
                          (str(entry_jd), distance, intensity[entry_jd], beta))
            new_selection[entry_id] = self.population.move(entry_id, target, factor=beta, in_place=False)
            factor = self.alpha0 * (self.delta ** self.current_generation)
            self.population.move_random(new_selection[entry_id], factor=factor, in_place=True)
            moves[entry_id] += 1
    # Summary table of the statistics collected during the moves.
    if len(distances) > 0:
        pcm_log.info('+----------------+--------------+-------------+-------------+')
        pcm_log.info('+ | Minimum | Maximum | Average |')
        pcm_log.info('+----------------+--------------+-------------+-------------+')
        pcm_log.info('+ Distances | %7.2f | %7.2f | %7.2f |' %
                     (np.min(distances), np.max(distances), np.average(distances)))
        pcm_log.info('+ Intensities | %7.2f | %7.2f | %7.2f |' %
                     (np.min(intensities), np.max(intensities), np.average(intensities)))
        pcm_log.info('+ Attractiveness | %7.2f | %7.2f | %7.2f |' %
                     (np.min(atractiveness), np.max(atractiveness), np.average(atractiveness)))
        pcm_log.info('+----------------+--------------+-------------+-------------+')
    # Apply the moves: moved candidates replace their originals, the
    # unmoved best passes straight to the next generation.
    for entry_id in selection:
        if new_selection[entry_id] is not None:
            self.replace_by_other(entry_id, new_selection[entry_id],
                                  reason='Moved %d times' % moves[entry_id])
            self.population.enable(new_selection[entry_id])
        else:
            self.pass_to_new_generation(entry_id, reason='The best')
def run(self):
    """Iteratively relax a structure with DFTB+ until forces/stress converge.

    Alternates geometry-optimization runs (internals and/or cell depending
    on the current convergence status) with a final static force
    evaluation.  A quality 'score' drives escalating interventions:
    random position jitter at score -10 and a full random cell at -1
    (when ``self.forced``).  Ends after a successful static run.
    """
    irun = 0
    score = INITIAL_SCORE
    dftb = DFTBplus()
    dftb.initialize(workdir=self.workdir, structure=self.structure, kpoints=self.kpoints)
    dftb.set_slater_koster(search_paths=self.slater_path)
    dftb.basic_input()
    dftb.driver['LatticeOpt'] = False
    # Decreasing the target_forces to avoid the final static
    # calculation of raising too much the forces after symmetrization
    dftb.driver['MaxForceComponent'] = self.target_forces
    dftb.driver['ConvergentForcesOnly'] = True
    dftb.driver['MaxSteps'] = 100
    dftb.hamiltonian['MaxSCCIterations'] = 20
    dftb.set_inputs()
    print(('Launching DFTB+ with target force of %9.2E ' % dftb.driver['MaxForceComponent']))
    dftb.run()
    if self.waiting:
        dftb.runner.wait()
    while True:
        if dftb.runner is not None and dftb.runner.poll() is not None:
            # A DFTB+ run finished: assess convergence and decide next step.
            pcm_log.info('Execution completed. Return code %d' % dftb.runner.returncode)
            stdo = read_dftb_stdout(filename=self.workdir + os.sep + 'dftb_stdout.log')
            good_forces, good_stress = self.relaxation_status()
            if 'max_force' in stdo:
                print(('Converged: %s\t Max Force: %9.3e\t MaxForceComponent: %9.3e' %
                       (stdo['ion_convergence'], stdo['max_force'], self.target_forces)))
            filename = dftb.workdir + os.sep + 'detailed.out'
            if not os.path.exists(filename):
                pcm_log.error('Could not find ' + filename)
                break
            if not good_forces and not good_stress:
                # This happens when all the SCC are completed without convergence
                dftb.driver['ConvergentForcesOnly'] = False
            else:
                dftb.driver['ConvergentForcesOnly'] = True
            score = self.quality(score)
            pcm_log.debug('Score : %d Good Forces: %s Good Stress: %s' %
                          (score, good_forces, good_stress))
            if score < 0:
                # Not yet converged: configure the next optimization run.
                if good_forces and good_stress:
                    pcm_log.debug('Convergence: Internals + Cell')
                    dftb.driver['MovedAtoms'] = '1:-1'
                    dftb.driver['LatticeOpt'] = True
                elif not good_forces and good_stress:
                    pcm_log.debug('Convergence: Internals')
                    dftb.driver['LatticeOpt'] = False
                    dftb.driver['MovedAtoms'] = '1:-1'
                elif good_forces and not good_stress:
                    pcm_log.debug('Convergence: Internals + Cell')
                    dftb.driver['LatticeOpt'] = True
                    dftb.driver['MovedAtoms'] = '1:-1'
                dftb.structure = read_geometry_gen(dftb.workdir + os.sep + 'geo_end.gen')
                # lets change the positions if the score have lowered to -10
                if score == -10 and self.forced:
                    dftb.structure.positions += 0.2 * np.random.rand(dftb.structure.natom, 3) - 0.1
                    dftb.structure.positions2reduced()
                    dftb.structure.set_cell(1.1 * dftb.structure.cell)
                if score == -1 and self.forced:
                    # Last resort: replace the structure with a random cell.
                    dftb.structure = dftb.structure.random_cell(dftb.structure.composition)
                    print('RANDOM STRUCTURE')
                    print((dftb.structure))
                    score = INITIAL_SCORE
                dftb.structure.save_json(dftb.workdir + os.sep + 'structure_current.json')
                if self.symmetrize:
                    dftb.structure = symmetrize(dftb.structure)
                self.structure = dftb.structure
                dftb.get_geometry()
                dftb.roll_outputs(irun)
                dftb.set_inputs()
                irun += 1
                print(('Launching DFTB+ with target force of %9.2E ' % dftb.driver['MaxForceComponent']))
                dftb.run()
                if self.waiting:
                    dftb.runner.wait()
            else:
                # Converged: run a final static force evaluation.
                pcm_log.debug('Final static calculation')
                dftb.structure = self.get_final_geometry()
                dftb.structure.save_json(dftb.workdir + os.sep + 'structure_final.json')
                if self.symmetrize:
                    dftb.structure = symmetrize(dftb.structure)
                self.structure = dftb.structure
                dftb.get_geometry()
                dftb.roll_outputs(irun)
                dftb.options['CalculateForces'] = True
                dftb.driver = {}
                dftb.set_inputs()
                print('Launching DFTB+ with static evaluation of forces ')
                dftb.run()
                if self.waiting:
                    dftb.runner.wait()
                while dftb.runner.poll() is None:
                    dftb.run_status()
                    time.sleep(10)
                print('Completed Static run')
                forces, stress, total_energy = self.get_forces_stress_energy()
                if stress is None or forces is None or total_energy is None:
                    # Static run failed to produce data: retry with a short,
                    # more permissive relaxation before giving up.
                    pcm_log.debug('Null Forces, Stress or Energy, relaxing and exiting')
                    dftb.basic_input()
                    dftb.driver['LatticeOpt'] = False
                    # Decreasing the target_forces to avoid the final static
                    # calculation of raising too much the forces after symmetrization
                    dftb.driver['MaxForceComponent'] = 0.9 * self.target_forces
                    dftb.driver['ConvergentForcesOnly'] = False
                    dftb.driver['MaxSteps'] = 10
                    dftb.hamiltonian['MaxSCCIterations'] = 50
                    print((dftb.driver))
                    dftb.set_inputs()
                    dftb.run()
                    if self.waiting:
                        dftb.runner.wait()
                    while dftb.runner.poll() is None:
                        time.sleep(10)
                    print(('FINAL:', read_detailed_out(filename=filename)))
                    forces, stress, total_energy = self.get_forces_stress_energy()
                    if stress is None or forces is None or total_energy is None:
                        pcm_log.debug('Again Null Forces, Stress or Energy, Randomizing Structure')
                        dftb.structure = dftb.structure.random_cell(dftb.structure.composition)
                        print('RANDOM STRUCTURE')
                        print((dftb.structure))
                        score = INITIAL_SCORE
                    else:
                        break
                else:
                    break
        else:
            # Still running: summarize the energy trajectory so far.
            pcm_log.debug('ID: %s' % os.path.basename(self.workdir))
            filename = dftb.workdir + os.sep + 'dftb_stdout.log'
            if os.path.exists(filename):
                stdo = read_dftb_stdout(filename=filename)
                print(('Number of steps:', len(stdo['Geometry_Steps'])))
                if len(stdo['Geometry_Steps']) > 1:
                    # Render the energy sequence as a '>'/'<' trend string.
                    line = 'Energy behavior: '
                    prev_energy = stdo['Geometry_Steps'][0]['Total Energy']['value']
                    line += ' %7.3f ' % prev_energy
                    for step in stdo['Geometry_Steps'][1:]:
                        new_energy = step['Total Energy']['value']
                        if prev_energy > new_energy:
                            line += '>'
                        else:
                            line += '<'
                        prev_energy = new_energy
                    finene = stdo['Geometry_Steps'][-1]['Total Energy']['value']
                    line += ' %7.3f' % finene
                    print(line)
            time.sleep(10)
def worker(db_settings, entry_id, workdir, target_forces, relaxator_params):
    """Relax one database entry with IonRelaxation2 and store the results back.

    Locks the entry, runs the relaxation, reads forces/stress/energy from the
    relaxer, updates the Mongo record and (when the geometry really changed)
    its Oganov fingerprint, and finally unlocks the entry.

    :param db_settings: (dict) parameters forwarded to get_database()
    :param entry_id: MongoDB identifier of the entry to relax
    :param workdir: (str) directory where the relaxation input/output lives
    :param target_forces: (float) force tolerance used as convergence target
    :param relaxator_params: (dict) must provide 'binary' and 'nmpiparal'
    """
    pcdb = get_database(db_settings)
    pcm_log.info('[%s]: Starting relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))
    # If another worker already owns this entry, do nothing instead of racing it.
    if pcdb.is_locked(entry_id):
        return
    else:
        pcdb.lock(entry_id)
    structure = pcdb.get_structure(entry_id)
    structure = structure.scale()
    print('relaxator_params', relaxator_params)
    relaxer = IonRelaxation2(structure, workdir=workdir, target_forces=target_forces, waiting=False,
                             binary=relaxator_params['binary'], encut=1.3, kp_grid=None, kp_density=1E4,
                             relax_cell=True)
    print('relaxing on:', relaxer.workdir)
    relaxer.run(relaxator_params['nmpiparal'])
    pcm_log.info('[%s]: Finished relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))

    # Results are only harvested when VASP actually produced an OUTCAR.
    filename = workdir + os.sep + 'OUTCAR'
    if os.path.isfile(filename):
        forces, stress, total_energy = relaxer.get_forces_stress_energy()

        if forces is not None:
            magnitude_forces = np.apply_along_axis(np.linalg.norm, 1, forces)
            print('Forces: Max: %9.3e Avg: %9.3e' % (np.max(magnitude_forces), np.average(magnitude_forces)))
            print('Stress: ', np.max(np.abs(stress.flatten())))

        if forces is None:
            pcm_log.error('No forces found on %s' % filename)
        if stress is None:
            pcm_log.error('No stress found on %s' % filename)
        if total_energy is None:
            pcm_log.error('No total_energy found on %s' % filename)

        new_structure = relaxer.get_final_geometry()

        if forces is not None and stress is not None and total_energy is not None and new_structure is not None:
            pcm_log.info('[%s]: Updating properties' % str(entry_id))
            pcdb.update(entry_id, structure=new_structure)
            te = total_energy
            pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'succeed',
                                                             'status.target_forces': target_forces,
                                                             'properties.forces': generic_serializer(forces),
                                                             'properties.stress': generic_serializer(stress),
                                                             'properties.energy': te,
                                                             'properties.energy_pa': te / new_structure.natom,
                                                             'properties.energy_pf': te / new_structure.get_composition().gcd}})

            # Fingerprint
            # Update the fingerprints only if the two structures are really different
            diffnatom = structure.natom != new_structure.natom
            diffcell = np.max(np.abs((structure.cell - new_structure.cell).flatten()))
            diffreduced = np.max(np.abs((structure.reduced - new_structure.reduced).flatten()))
            if diffnatom != 0 or diffcell > 1E-7 or diffreduced > 1E-7:
                # One fingerprint entry per species pair, keyed symmetrically in
                # the two atomic numbers so (A, B) and (B, A) collapse to one key.
                analysis = StructureAnalysis(new_structure, radius=50)
                x, ys = analysis.fp_oganov(delta=0.01, sigma=0.01)
                fingerprint = {'_id': entry_id}
                for k in ys:
                    atomic_number1 = atomic_number(new_structure.species[k[0]])
                    atomic_number2 = atomic_number(new_structure.species[k[1]])
                    pair = '%06d' % min(atomic_number1 * 1000 + atomic_number2,
                                        atomic_number2 * 1000 + atomic_number1)
                    fingerprint[pair] = list(ys[k])
                if pcdb.db.fingerprints.find_one({'_id': entry_id}) is None:
                    pcdb.db.fingerprints.insert(fingerprint)
                else:
                    pcdb.db.fingerprints.update({'_id': entry_id}, fingerprint)
            else:
                pcm_log.debug('Original and new structures are very similar.')
                pcm_log.debug('Max diff cell: %10.3e' %
                              np.max(np.absolute((structure.cell - new_structure.cell).flatten())))
                if structure.natom == new_structure.natom:
                    pcm_log.debug('Max diff reduced coordinates: %10.3e' %
                                  np.max(np.absolute((structure.reduced - new_structure.reduced).flatten())))
        else:
            # Any missing piece of data marks the relaxation as failed.
            pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'failed'}})
            pcm_log.error('Bad data after relaxation. Tagging relaxation as failed')
    else:
        pcm_log.error('ERROR: File not found %s' % filename)
    pcm_log.info('[%s]: Unlocking the entry' % str(entry_id))
    pcdb.unlock(entry_id)
def worker_maise(db_settings, entry_id, workdir, relaxator_params):
    """
    Relax and evaluate the energy of the structure stored with identifier
    'entry_id' using the MAISE code.

    :param db_settings: (dict) Dictionary of DB parameters needed to create a PyChemiaDB object
    :param entry_id: MongoDB identifier of one entry of the database created from db_settings
    :param workdir: (str) Working directory where input and output from MAISE is written
    :param relaxator_params: (dict) Arguments needed to control the relaxation using MAISE
                             Arguments are stored as keys and they include:
                             'target_forces': tolerance to consider one candidate as relaxed.
                             'source_dir': directory with executable maise and directory INI
    :return: None (results are written back into the database)
    """
    max_ncalls = 6
    pcdb = get_database(db_settings)
    target_forces = relaxator_params['target_forces']
    source_dir = relaxator_params['source_dir']

    pcm_log.info('[%s]: Starting relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))

    # Bail out if another worker already owns this entry.
    if pcdb.is_locked(entry_id):
        return
    else:
        pcdb.lock(entry_id)
    structure = pcdb.get_structure(entry_id)
    status = pcdb.get_dicts(entry_id)[2]
    # 'ncalls' in status tracks how many MAISE attempts this entry has had.
    if 'ncalls' in status and status['ncalls'] > 0:
        ncalls = status['ncalls'] + 1
        print('ncalls = ', status['ncalls'])
    else:
        ncalls = 1
    print('Verifing initial structure...')
    # The eye()*5 term masks the zero self-distances on the diagonal, so the
    # minimum is the closest *pair* distance; regenerate until no pair < 1.9.
    while np.min(structure.distance_matrix() + (np.eye(structure.natom) * 5)) < 1.9:
        print('ERROR: Bad initial guess, two atoms are to close. Creating new random structure for id: %s' % str(entry_id))
        write_poscar(structure, workdir + os.sep + 'Fail_initial_POSCAR')  # WIH
        structure = Structure.random_cell(structure.composition)

    write_poscar(structure, workdir + os.sep + 'POSCAR')
    # First call uses setup_1; later calls switch to setup_2.
    if not os.path.exists(workdir + os.sep + 'setup') and ncalls == 1:  # WIH
        print('First run.')  # WIH
        # print('Verifying that everything runs smoothly')  # WIH
        print(workdir + os.sep + 'setup')
        shutil.copy2(source_dir + os.sep + 'setup_1', workdir + os.sep + 'setup')  # WIH
    elif ncalls > 1:  # WIH
        shutil.copy2(source_dir + os.sep + 'setup_2', workdir + os.sep + 'setup')  # WIH
    if not os.path.exists(workdir + os.sep + 'INI'):
        os.symlink(source_dir + os.sep + 'INI', workdir + os.sep + 'INI')
    if not os.path.exists(workdir + os.sep + 'maise'):
        os.symlink(source_dir + os.sep + 'maise', workdir + os.sep + 'maise')

    # Get the Current Working Directory
    # cwd = os.getcwd()

    # Move to the actual directory where maise will run
    # NOTE(review): os.chdir changes global process state and is never restored
    # in this function — confirm callers tolerate that.
    os.chdir(workdir)

    wf = open('maise.stdout', 'w')
    subprocess.call(['./maise'], stdout=wf)
    wf.close()

    if os.path.isfile('OSZICAR'):
        energies = np.loadtxt('OSZICAR')
    else:
        energies = None

    forces = None
    stress = None
    stress_kb = None
    if os.path.isfile('OUTCAR'):
        # NOTE(review): this file handle is never closed.
        rf = open('OUTCAR', 'r')
        data = rf.read()

        # Each match is one TOTAL-FORCE table: rows of 7 numbers
        # (3 positions, 3 forces, 1 extra column).
        pos_forces = re.findall(r'TOTAL-FORCE \(eV/Angst\)\s*-*\s*([-.\d\s]+)\s+-{2}', data)
        pos_forces = np.array([x.split() for x in pos_forces], dtype=float)

        if len(pos_forces) > 0 and len(pos_forces[-1]) % 7 == 0:
            pos_forces.shape = (len(pos_forces), -1, 7)
            forces = pos_forces[:, :, 3:6]
            # positions = pos_forces[:, :, :3]
        else:
            print('Forces and Positions could not be parsed : ', pos_forces.shape)
            print('pos_forces =\n%s ' % pos_forces)

        # Exactly two stress blocks (initial and final) are expected.
        str_stress = re.findall('Total([.\d\s-]*)in', data)
        if len(str_stress) == 2:
            stress = np.array([[float(y) for y in x.split()] for x in str_stress])
        str_stress = re.findall('in kB([.\d\s-]*)energy', data)
        if len(str_stress) == 2:
            stress_kb = np.array([[float(y) for y in x.split()] for x in str_stress])

    create_new = False
    if not os.path.isfile('CONTCAR') or os.path.getsize("CONTCAR") == 0:
        create_new = True
        print('CONTCAR not found in entry: %s' % str(entry_id))
        # Archive the failed POSCAR under the first unused suffix.
        # NOTE(review): '%03s' pads with spaces, not zeros — '%03d' % i was
        # probably intended; verify before changing.
        i = 1
        while True:
            if not os.path.isfile('POSCAR-failed-%03s' % str(i)):
                os.rename('POSCAR', 'POSCAR-failed-%03s' % str(i))
                break
            else:
                i += 1
    else:
        new_structure = read_poscar('CONTCAR')
        # min_dist = np.min(new_structure.distance_matrix+np.ones((new_structure.natom,new_structure.natom)))
        min_dist = np.min(new_structure.distance_matrix() + (np.eye(new_structure.natom) * 5))  # WIH
        print('Minimal distance= %8.7f' % min_dist)  # WIH
        if min_dist < 2.0:
            print('ERROR: MAISE finished with and structure with distances too close:', entry_id)  # WIH
            write_poscar(new_structure, workdir + os.sep + 'Collapsed_CONTCAR')  # WIH
            create_new = True  # WIH

    if create_new:
        new_structure = Structure.random_cell(structure.composition)
        ncalls = 0  # WIH

    if ncalls > max_ncalls:
        # Give up on relaxing this candidate: replace it and reset the counter.
        print('WARNING: Too many calls to MAISE and no relaxation succeeded, replacing structure: ', entry_id)  # WIH
        new_structure = Structure.random_cell(structure.composition)
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.ncalls': 0}})
        create_new = True
    else:
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.ncalls': ncalls}})
    pcdb.update(entry_id, structure=new_structure, properties={})

    # if not create_new and energies is not None and forces is not None and stress is not None:
    if energies is not None and forces is not None and stress is not None:
        te = energies[1]
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'succeed',
                                                         'status.target_forces': target_forces,
                                                         'properties.initial_forces': generic_serializer(forces[0]),
                                                         'properties.initial_stress': generic_serializer(stress[0]),
                                                         'properties.initial_stress_kB': generic_serializer(stress_kb[0]),
                                                         'properties.forces': generic_serializer(forces[1]),
                                                         'properties.stress': generic_serializer(stress[1]),
                                                         'properties.stress_kB': generic_serializer(stress_kb[1]),
                                                         'properties.energy': te,
                                                         'properties.energy_pa': te / new_structure.natom,
                                                         'properties.energy_pf': te / new_structure.get_composition().gcd}})

    # Rotate every MAISE artifact to a numbered backup so the next run starts clean.
    for ifile in ['POSCAR', 'CONTCAR', 'setup', 'OUTCAR', 'maise.stdout', 'list.dat']:
        if not os.path.exists(ifile):
            wf = open(ifile, 'w')
            wf.write('')
            wf.close()
        n = 1
        while True:
            if os.path.exists(ifile + ('_%03d' % n)):
                n += 1
            else:
                break
        os.rename(ifile, ifile + ('_%03d' % n))

    pcm_log.info('[%s]: Unlocking the entry' % str(entry_id))
    pcdb.unlock(entry_id)
def worker_maise(db_settings, entry_id, workdir, relaxator_params):
    """
    Relax and evaluate the energy of the structure stored with identifier
    'entry_id' using the MAISE code.

    :param db_settings: (dict) Dictionary of DB parameters needed to create a PyChemiaDB object
    :param entry_id: MongoDB identifier of one entry of the database created from db_settings
    :param workdir: (str) Working directory where input and output from MAISE is written
    :param relaxator_params: (dict) Arguments needed to control the relaxation using MAISE
                             Arguments are stored as keys and they include:
                             'target_forces': tolerance to consider one candidate as relaxed.
                             'source_dir': directory with executable maise and directory INI
    :return: None (results are written back into the database)
    """
    max_ncalls = 6
    pcdb = get_database(db_settings)
    target_forces = relaxator_params['target_forces']
    source_dir = relaxator_params['source_dir']

    pcm_log.info('[%s]: Starting relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))

    # Bail out if another worker already owns this entry.
    if pcdb.is_locked(entry_id):
        return
    else:
        pcdb.lock(entry_id)
    structure = pcdb.get_structure(entry_id)
    status = pcdb.get_dicts(entry_id)[2]
    # 'ncalls' in status tracks how many MAISE attempts this entry has had.
    if 'ncalls' in status and status['ncalls'] > 0:
        ncalls = status['ncalls'] + 1
        print('ncalls = ', status['ncalls'])
    else:
        ncalls = 1
    print('Verifing initial structure...')
    # eye()*5 masks the zero diagonal so the minimum is the closest pair distance.
    while np.min(structure.distance_matrix() + (np.eye(structure.natom) * 5)) < 1.9:
        print('ERROR: Bad initial guess, two atoms are to close. Creating new random structure for id: %s' % str(entry_id))
        write_poscar(structure, workdir + os.sep + 'Fail_initial_POSCAR')  # WIH
        structure = Structure.random_cell(structure.composition)

    write_poscar(structure, workdir + os.sep + 'POSCAR')
    # First call uses setup_1; later calls switch to setup_2.
    if not os.path.exists(workdir + os.sep + 'setup') and ncalls == 1:  # WIH
        print('First run.')  # WIH
        # print('Verifying that everything runs smoothly')  # WIH
        print(workdir + os.sep + 'setup')
        shutil.copy2(source_dir + os.sep + 'setup_1', workdir + os.sep + 'setup')  # WIH
    elif ncalls > 1:  # WIH
        shutil.copy2(source_dir + os.sep + 'setup_2', workdir + os.sep + 'setup')  # WIH
    if not os.path.exists(workdir + os.sep + 'INI'):
        os.symlink(source_dir + os.sep + 'INI', workdir + os.sep + 'INI')
    if not os.path.exists(workdir + os.sep + 'maise'):
        os.symlink(source_dir + os.sep + 'maise', workdir + os.sep + 'maise')

    # Get the Current Working Directory
    # cwd = os.getcwd()

    # Move to the actual directory where maise will run
    # NOTE(review): global chdir, never restored here — confirm callers tolerate it.
    os.chdir(workdir)

    wf = open('maise.stdout', 'w')
    subprocess.call(['./maise'], stdout=wf)
    wf.close()

    if os.path.isfile('OSZICAR'):
        energies = np.loadtxt('OSZICAR')
    else:
        energies = None

    forces = None
    stress = None
    stress_kb = None
    if os.path.isfile('OUTCAR'):
        # NOTE(review): this file handle is never closed.
        rf = open('OUTCAR', 'r')
        data = rf.read()

        # Each match is one TOTAL-FORCE table: rows of 7 numbers
        # (3 positions, 3 forces, 1 extra column).
        pos_forces = re.findall(r'TOTAL-FORCE \(eV/Angst\)\s*-*\s*([-.\d\s]+)\s+-{2}', data)
        pos_forces = np.array([x.split() for x in pos_forces], dtype=float)

        if len(pos_forces) > 0 and len(pos_forces[-1]) % 7 == 0:
            pos_forces.shape = (len(pos_forces), -1, 7)
            forces = pos_forces[:, :, 3:6]
            # positions = pos_forces[:, :, :3]
        else:
            print('Forces and Positions could not be parsed : ', pos_forces.shape)
            print('pos_forces =\n%s ' % pos_forces)

        # Exactly two stress blocks (initial and final) are expected.
        str_stress = re.findall('Total([.\d\s-]*)in', data)
        if len(str_stress) == 2:
            stress = np.array([[float(y) for y in x.split()] for x in str_stress])
        str_stress = re.findall('in kB([.\d\s-]*)energy', data)
        if len(str_stress) == 2:
            stress_kb = np.array([[float(y) for y in x.split()] for x in str_stress])

    create_new = False
    if not os.path.isfile('CONTCAR') or os.path.getsize("CONTCAR") == 0:
        create_new = True
        print('CONTCAR not found in entry: %s' % str(entry_id))
        # Archive the failed POSCAR under the first unused suffix.
        # NOTE(review): '%03s' pads with spaces, not zeros — '%03d' % i was
        # probably intended; verify before changing.
        i = 1
        while True:
            if not os.path.isfile('POSCAR-failed-%03s' % str(i)):
                os.rename('POSCAR', 'POSCAR-failed-%03s' % str(i))
                break
            else:
                i += 1
    else:
        new_structure = read_poscar('CONTCAR')
        # min_dist = np.min(new_structure.distance_matrix+np.ones((new_structure.natom,new_structure.natom)))
        min_dist = np.min(new_structure.distance_matrix() + (np.eye(new_structure.natom) * 5))  # WIH
        print('Minimal distance= %8.7f' % min_dist)  # WIH
        if min_dist < 2.0:
            print('ERROR: MAISE finished with and structure with distances too close:', entry_id)  # WIH
            write_poscar(new_structure, workdir + os.sep + 'Collapsed_CONTCAR')  # WIH
            create_new = True  # WIH

    if create_new:
        new_structure = Structure.random_cell(structure.composition)
        ncalls = 0  # WIH

    if ncalls > max_ncalls:
        # Give up on relaxing this candidate: replace it and reset the counter.
        print('WARNING: Too many calls to MAISE and no relaxation succeeded, replacing structure: ', entry_id)  # WIH
        new_structure = Structure.random_cell(structure.composition)
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.ncalls': 0}})
        create_new = True
    else:
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.ncalls': ncalls}})
    pcdb.update(entry_id, structure=new_structure, properties={})

    # if not create_new and energies is not None and forces is not None and stress is not None:
    if energies is not None and forces is not None and stress is not None:
        te = energies[1]
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'succeed',
                                                         'status.target_forces': target_forces,
                                                         'properties.initial_forces': generic_serializer(forces[0]),
                                                         'properties.initial_stress': generic_serializer(stress[0]),
                                                         'properties.initial_stress_kB': generic_serializer(stress_kb[0]),
                                                         'properties.forces': generic_serializer(forces[1]),
                                                         'properties.stress': generic_serializer(stress[1]),
                                                         'properties.stress_kB': generic_serializer(stress_kb[1]),
                                                         'properties.energy': te,
                                                         'properties.energy_pa': te / new_structure.natom,
                                                         'properties.energy_pf': te / new_structure.get_composition().gcd}})

    # Rotate every MAISE artifact to a numbered backup so the next run starts clean.
    for ifile in ['POSCAR', 'CONTCAR', 'setup', 'OUTCAR', 'maise.stdout', 'list.dat']:
        if not os.path.exists(ifile):
            wf = open(ifile, 'w')
            wf.write('')
            wf.close()
        n = 1
        while True:
            if os.path.exists(ifile + ('_%03d' % n)):
                n += 1
            else:
                break
        os.rename(ifile, ifile + ('_%03d' % n))

    pcm_log.info('[%s]: Unlocking the entry' % str(entry_id))
    pcdb.unlock(entry_id)
def run_one(self):
    """Take a static, fitness-ordered snapshot of the evaluated active members
    and log how many entries it contains."""
    evaluated_actives = self.population.actives_evaluated
    selection = self.population.ids_sorted(evaluated_actives)
    pcm_log.info('Size of selection : %d' % len(selection))
def run(self, nparal=1):
    """Drive repeated VASP runs until max force and stress drop below
    self.target_forces (or self.max_calls attempts are spent), then gather
    forces/stress/energies from every OUTCAR* file into self.output.

    :param nparal: (int) number of MPI processes passed to the VASP job
    """
    self.started = True
    self.cleaner()
    vj = self.vaspjob
    ncalls = 1
    self.first_run(nparal)

    while True:
        # A non-None poll() means the current VASP run has exited.
        if vj.runner is not None and vj.runner.poll() is not None:
            pcm_log.info('Execution completed. Return code %d' % vj.runner.returncode)

            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                read_vasp_stdout(filename=filename)

            ncalls += 1
            va = VaspAnalyser(self.workdir)
            va.run()

            max_force, max_stress = self.get_max_force_stress()
            # NOTE(review): this print raises TypeError if max_force is None —
            # the None check only happens afterwards; confirm
            # get_max_force_stress never returns None here.
            print('Max Force: %9.3E Stress: %9.3E (target forces= %E)' %
                  (max_force, max_stress, self.target_forces))

            if max_force is not None and max_force < self.target_forces:
                # Conditions to finish the run
                if max_stress < self.target_forces:
                    self.success = True
                    break
                elif not self.relax_cell:
                    # Stress target only matters when the cell is being relaxed.
                    self.success = True
                    break
                elif ncalls >= self.max_calls:
                    self.success = False
                    break

            # Not converged yet: update the inputs and launch another run.
            self.update()
            vj.run(use_mpi=True, mpi_num_procs=nparal)
            if self.waiting:
                vj.runner.wait()
        else:
            # Still running: report SCF progress and poll again later.
            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                vasp_stdout = read_vasp_stdout(filename=filename)
                if len(vasp_stdout['iterations']) > 0:
                    pcm_log.debug('[%s] SCF: %s' % (os.path.basename(self.workdir),
                                                    str(vasp_stdout['iterations'])))
                # if len(vasp_stdout['energies']) > 2:
                #     energy_str = ' %9.3E' % vasp_stdout['energies'][0]
                #     for i in range(1, len(vasp_stdout['energies'])):
                #         if vasp_stdout['energies'][i] < vasp_stdout['energies'][i-1]:
                #             energy_str += ' >'
                #         else:
                #             energy_str += ' <'
                #     pcm_log.debug(energy_str)
            time.sleep(30)

    # Concatenate forces and stresses across every archived OUTCAR,
    # newest file first (reverse-sorted listing).
    outcars = sorted([x for x in os.listdir(self.workdir) if x.startswith('OUTCAR')])[::-1]
    vo = VaspOutput(self.workdir + os.sep + outcars[0])
    forces = vo.forces
    stress = vo.stress
    if len(outcars) > 1:
        for i in outcars[1:]:
            vo = VaspOutput(self.workdir + os.sep + i)
            forces = np.concatenate((forces, vo.forces))
            stress = np.concatenate((stress, vo.stress))

    vj.get_outputs()
    self.output = {'forces': generic_serializer(forces),
                   'stress': generic_serializer(stress),
                   'energy': vj.outcar.energy,
                   'energies': generic_serializer(vj.outcar.energies)}
    if vj.outcar.is_finished:
        self.finished = True
def run(self, nparal=1):
    """Drive repeated VASP runs until max force and stress drop below
    self.target_forces (or self.max_calls attempts are spent), then gather
    forces/stress/energies from every OUTCAR* file into self.output.

    :param nparal: (int) number of MPI processes passed to the VASP job
    """
    self.started = True
    self.cleaner()
    vj = self.vaspjob
    ncalls = 1
    self.first_run(nparal)

    while True:
        # A non-None poll() means the current VASP run has exited.
        if vj.runner is not None and vj.runner.poll() is not None:
            pcm_log.info('Execution completed. Return code %d' % vj.runner.returncode)

            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                read_vasp_stdout(filename=filename)

            ncalls += 1
            va = VaspAnalyser(self.workdir)
            va.run()

            max_force, max_stress = self.get_max_force_stress()
            # NOTE(review): this print raises TypeError if max_force is None —
            # the None check only happens afterwards; confirm
            # get_max_force_stress never returns None here.
            print('Max Force: %9.3E Stress: %9.3E (target forces= %E)' %
                  (max_force, max_stress, self.target_forces))

            if max_force is not None and max_force < self.target_forces:
                # Conditions to finish the run
                if max_stress < self.target_forces:
                    self.success = True
                    break
                elif not self.relax_cell:
                    # Stress target only matters when the cell is being relaxed.
                    self.success = True
                    break
                elif ncalls >= self.max_calls:
                    self.success = False
                    break

            # Not converged yet: update the inputs and launch another run.
            self.update()
            vj.run(use_mpi=True, mpi_num_procs=nparal)
            if self.waiting:
                vj.runner.wait()
        else:
            # Still running: report SCF progress and poll again later.
            filename = self.workdir + os.sep + 'vasp_stdout.log'
            if os.path.exists(filename):
                vasp_stdout = read_vasp_stdout(filename=filename)
                if len(vasp_stdout['iterations']) > 0:
                    pcm_log.debug('[%s] SCF: %s' % (os.path.basename(self.workdir),
                                                    str(vasp_stdout['iterations'])))
                # if len(vasp_stdout['energies']) > 2:
                #     energy_str = ' %9.3E' % vasp_stdout['energies'][0]
                #     for i in range(1, len(vasp_stdout['energies'])):
                #         if vasp_stdout['energies'][i] < vasp_stdout['energies'][i-1]:
                #             energy_str += ' >'
                #         else:
                #             energy_str += ' <'
                #     pcm_log.debug(energy_str)
            time.sleep(30)

    # Concatenate forces and stresses across every archived OUTCAR,
    # newest file first (reverse-sorted listing).
    outcars = sorted([x for x in os.listdir(self.workdir) if x.startswith('OUTCAR')])[::-1]
    vo = VaspOutput(self.workdir + os.sep + outcars[0])
    forces = vo.forces
    stress = vo.stress
    if len(outcars) > 1:
        for i in outcars[1:]:
            vo = VaspOutput(self.workdir + os.sep + i)
            forces = np.concatenate((forces, vo.forces))
            stress = np.concatenate((stress, vo.stress))

    vj.get_outputs()
    self.output = {'forces': generic_serializer(forces),
                   'stress': generic_serializer(stress),
                   'energy': vj.outcar.energy,
                   'energies': generic_serializer(vj.outcar.energies)}
    if vj.outcar.is_finished:
        self.finished = True
def worker(db_settings, entry_id, workdir, target_forces, relaxator_params):
    """Relax one database entry with IonRelaxation (up to 10 calls) and store
    the results back.

    Locks the entry, runs the relaxation, reads forces/stress/energy from the
    relaxer, updates the Mongo record and (when the geometry really changed)
    its Oganov fingerprint, and finally unlocks the entry.

    :param db_settings: (dict) parameters forwarded to get_database()
    :param entry_id: MongoDB identifier of the entry to relax
    :param workdir: (str) directory where the relaxation input/output lives
    :param target_forces: (float) force tolerance used as convergence target
    :param relaxator_params: (dict) must provide 'binary' and 'nmpiparal'
    """
    pcdb = get_database(db_settings)
    pcm_log.info('[%s]: Starting relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))
    # If another worker already owns this entry, do nothing instead of racing it.
    if pcdb.is_locked(entry_id):
        return
    else:
        pcdb.lock(entry_id)
    structure = pcdb.get_structure(entry_id)
    structure = structure.scale()
    print('relaxator_params', relaxator_params)
    relaxer = IonRelaxation(structure, workdir=workdir, target_forces=target_forces, waiting=False,
                            binary=relaxator_params['binary'], encut=1.3, kp_grid=None, kp_density=1E4,
                            relax_cell=True, max_calls=10)
    print('relaxing on:', relaxer.workdir)
    relaxer.run(relaxator_params['nmpiparal'])
    pcm_log.info('[%s]: Finished relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))

    # Results are only harvested when VASP actually produced an OUTCAR.
    filename = workdir + os.sep + 'OUTCAR'
    if os.path.isfile(filename):
        forces, stress, total_energy = relaxer.get_forces_stress_energy()

        if forces is not None:
            magnitude_forces = np.apply_along_axis(np.linalg.norm, 1, forces)
            print('Forces: Max: %9.3e Avg: %9.3e' % (np.max(magnitude_forces), np.average(magnitude_forces)))
            print('Stress: ', np.max(np.abs(stress.flatten())))

        if forces is None:
            pcm_log.error('No forces found on %s' % filename)
        if stress is None:
            pcm_log.error('No stress found on %s' % filename)
        if total_energy is None:
            pcm_log.error('No total_energy found on %s' % filename)

        new_structure = relaxer.get_final_geometry()

        if forces is not None and stress is not None and total_energy is not None and new_structure is not None:
            pcm_log.info('[%s]: Updating properties' % str(entry_id))
            pcdb.update(entry_id, structure=new_structure)
            te = total_energy
            pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'succeed',
                                                             'status.target_forces': target_forces,
                                                             'properties.forces': generic_serializer(forces),
                                                             'properties.stress': generic_serializer(stress),
                                                             'properties.energy': te,
                                                             'properties.energy_pa': te / new_structure.natom,
                                                             'properties.energy_pf': te / new_structure.get_composition().gcd}})

            # Fingerprint
            # Update the fingerprints only if the two structures are really different
            diffnatom = structure.natom != new_structure.natom
            diffcell = np.max(np.abs((structure.cell - new_structure.cell).flatten()))
            diffreduced = np.max(np.abs((structure.reduced - new_structure.reduced).flatten()))
            if diffnatom != 0 or diffcell > 1E-7 or diffreduced > 1E-7:
                # One fingerprint entry per species pair, keyed symmetrically in
                # the two atomic numbers so (A, B) and (B, A) collapse to one key.
                analysis = StructureAnalysis(new_structure, radius=50)
                x, ys = analysis.fp_oganov(delta=0.01, sigma=0.01)
                fingerprint = {'_id': entry_id}
                for k in ys:
                    atomic_number1 = atomic_number(new_structure.species[k[0]])
                    atomic_number2 = atomic_number(new_structure.species[k[1]])
                    pair = '%06d' % min(atomic_number1 * 1000 + atomic_number2,
                                        atomic_number2 * 1000 + atomic_number1)
                    fingerprint[pair] = list(ys[k])
                if pcdb.db.fingerprints.find_one({'_id': entry_id}) is None:
                    pcdb.db.fingerprints.insert(fingerprint)
                else:
                    pcdb.db.fingerprints.update({'_id': entry_id}, fingerprint)
            else:
                pcm_log.debug('Original and new structures are very similar.')
                pcm_log.debug('Max diff cell: %10.3e' %
                              np.max(np.absolute((structure.cell - new_structure.cell).flatten())))
                if structure.natom == new_structure.natom:
                    pcm_log.debug('Max diff reduced coordinates: %10.3e' %
                                  np.max(np.absolute((structure.reduced - new_structure.reduced).flatten())))
        else:
            # Any missing piece of data marks the relaxation as failed.
            pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'failed'}})
            pcm_log.error('Bad data after relaxation. Tagging relaxation as failed')
    else:
        pcm_log.error('ERROR: File not found %s' % filename)
    pcm_log.info('[%s]: Unlocking the entry' % str(entry_id))
    pcdb.unlock(entry_id)
def run_one(self):
    """Execute one iteration of the bee-colony search.

    On the first iteration (scouts_elite is None) the sorted selection is
    partitioned into elite / best / other scouts and foragers are spawned
    around each elite and best scout. On later iterations, dead bees are
    pruned, a strong 'other' scout may be promoted into the best group, and
    fresh random scouts top the next generation up to generation_size.
    """
    # Get a static selection of the values in the generation that are relaxed
    selection = self.population.ids_sorted(self.actives_in_generation)
    pcm_log.info('Size of selection : %d' % len(selection))
    if self.scouts_elite is None:
        print('First run', len(selection), self.ns)
        # During the first iteration all the selection are scouts
        # Lets choose from there the elite, the best and the actual
        # scouts for the next iterations
        if len(selection) >= self.ns:
            # Partition by rank: first ne are elite, next nb are best,
            # the remainder (up to ns) are ordinary scouts.
            self.scouts_elite = list(selection[:self.ne])
            self.scouts_best = list(selection[self.ne:self.ne + self.nb])
            self.scouts_others = list(selection[self.ne + self.nb:self.ns - self.ne + self.nb])
            # Disable extra scouts (from an initial population)
            for entry_id in selection[self.ns:]:
                self.population.disable(entry_id)
            self.print_status()
            # Add nre foragers around each elite scout
            for entry_id in self.scouts_elite:
                self.pass_to_new_generation(entry_id, reason='Elite')
                self.create_foragers(entry_id, self.nre)
            self.print_status()
            # Add nrb foragers around each best scout
            for entry_id in self.scouts_best:
                self.pass_to_new_generation(entry_id, reason='Best')
                self.create_foragers(entry_id, self.nrb)
            self.print_status()
            # Pad the next generation with random (initially disabled) members.
            for u in range(self.generation_size - len(self.get_generation(self.current_generation + 1))):
                ident, origin = self.population.add_random()
                self.population.disable(ident)
                self.generation[ident] = [self.current_generation + 1]
                self.scouts_others.append(ident)
                print('Added to other scouts', ident)
            self.print_status()
        else:
            pass
            # Number of scouts insufficient, increase their number with more
            # random members
            # for i in range(selection, self.ns):
            #     entry_id=self.population.add_random()
            #     print 'Population raised:',entry_id
    else:
        # New iterations look into each patch and see witch bees are on each patch
        print('Foragers %d' % len(self.foragers), self.foragers)
        print('Elite %d' % len(self.scouts_elite), self.scouts_elite)
        print('Best %d' % len(self.scouts_best), self.scouts_best)
        print('Others %d' % len(self.scouts_others), self.scouts_others)
        self.process_scouts(self.scouts_elite, selection)
        self.process_scouts(self.scouts_best, selection)
        # Drop any 'other' scout that did not survive into the selection.
        dead_bees = 0
        for j in list(self.scouts_others):
            if j not in selection:
                # Consider a dead bee
                self.scouts_others.remove(j)
                dead_bees += 1
        scouts_others_alive = self.population.ids_sorted(self.scouts_others)
        if len(scouts_others_alive) > 0:
            # Promote the best surviving 'other' scout if it beats the worst
            # member of the best group (lower value is better here).
            best_scout = scouts_others_alive[0]
            worst_best = self.population.ids_sorted(self.scouts_best)[-1]
            if self.population.value(best_scout) < self.population.value(worst_best):
                self.scouts_best.remove(worst_best)
                self.scouts_best.append(best_scout)
                self.population.disable(worst_best)
        # Add nre foragers around each elite scout
        for entry_id in self.scouts_elite:
            self.pass_to_new_generation(entry_id, reason='Elite')
            self.foragers[entry_id] = []
            self.create_foragers(entry_id, self.nre)
        self.print_status()
        # Add nrb foragers around each best scout
        for entry_id in self.scouts_best:
            self.pass_to_new_generation(entry_id, reason='Best')
            self.foragers[entry_id] = []
            self.create_foragers(entry_id, self.nrb)
        self.print_status()
        for i in self.scouts_others:
            self.population.disable(i)
        self.print_status()
        # for i in range(self.ns - self.ne - self.nb):
        # Pad the next generation with random (initially disabled) members.
        for u in range(self.generation_size - len(self.get_generation(self.current_generation + 1))):
            ident, origin = self.population.add_random()
            self.population.disable(ident)
            self.generation[ident] = [self.current_generation + 1]
            self.scouts_others.append(ident)
            print('Added to other scouts', ident)
        print('After run_one:')
        self.print_status()
def worker_maise(db_settings, entry_id, workdir, target_forces, relaxator_params):
    """Relax one database entry with MAISE and store the results back.

    Locks the entry, writes the POSCAR, runs ./maise in workdir, parses
    OSZICAR/OUTCAR for energies, forces and stresses, updates the Mongo
    record, archives the run files and unlocks the entry.

    :param db_settings: (dict) parameters forwarded to get_database()
    :param entry_id: MongoDB identifier of the entry to relax
    :param workdir: (str) directory where MAISE input/output is written
    :param target_forces: (float) tolerance recorded as the convergence target
    :param relaxator_params: unused in this variant
    """
    max_ncalls = 6
    pcdb = get_database(db_settings)
    pcm_log.info('[%s]: Starting relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))
    # Bail out if another worker already owns this entry.
    if pcdb.is_locked(entry_id):
        return
    else:
        pcdb.lock(entry_id)
    structure = pcdb.get_structure(entry_id)
    status = pcdb.get_dicts(entry_id)[2]
    # 'ncalls' counts previous MAISE attempts on this entry.
    if 'ncalls' in status:
        ncalls = status['ncalls'] + 1
    else:
        ncalls = 1
    # print('Current directory: '+os.getcwd() )
    # print('Working directory: '+workdir)
    write_poscar(structure, workdir + os.sep + 'POSCAR')
    if not os.path.exists(workdir + os.sep + 'setup'):
        shutil.copy2('setup', workdir)
    if not os.path.exists(workdir + os.sep + 'INI'):
        os.symlink(os.getcwd() + os.sep + 'INI', workdir + os.sep + 'INI')
    if not os.path.exists(workdir + os.sep + 'maise'):
        os.symlink(os.getcwd() + os.sep + 'maise', workdir + os.sep + 'maise')
    # NOTE(review): cwd is saved but never restored with os.chdir(cwd) — confirm
    # callers tolerate the working-directory change.
    cwd = os.getcwd()
    os.chdir(workdir)
    wf = open('maise.stdout', 'w')
    subprocess.call(['./maise'], stdout=wf)
    wf.close()
    if os.path.isfile('OSZICAR'):
        energies = np.loadtxt('OSZICAR')
    else:
        energies = None
    if os.path.isfile('OUTCAR'):
        # NOTE(review): this file handle is never closed.
        rf = open('OUTCAR', 'r')
        data = rf.read()
        # Each match is one TOTAL-FORCE table: rows of 7 numbers
        # (3 positions, 3 forces, 1 extra column).
        pos_forces = re.findall(r'TOTAL-FORCE \(eV/Angst\)\s*-*\s*([-.\d\s]+)\s+-{2}', data)
        pos_forces = np.array([x.split() for x in pos_forces], dtype=float)
        if len(pos_forces) > 0 and len(pos_forces[-1]) % 7 == 0:
            pos_forces.shape = (len(pos_forces), -1, 7)
            forces = pos_forces[:, :, 3:6]
            positions = pos_forces[:, :, :3]
        else:
            # NOTE(review): on this branch 'forces' is never assigned, so the
            # 'forces is not None' test below raises NameError — verify.
            print('Forces and Positions could not be parsed : ', pos_forces.shape)
            print('pos_forces =\n%s ' % pos_forces)
        # Exactly two stress blocks (initial and final) are expected;
        # same possibly-unbound caveat applies to 'stress' and 'stress_kB'.
        str_stress = re.findall('Total([\.\d\s-]*)in', data)
        if len(str_stress) == 2:
            stress = np.array([[float(y) for y in x.split()] for x in str_stress])
        str_stress = re.findall('in kB([\.\d\s-]*)energy', data)
        if len(str_stress) == 2:
            stress_kB = np.array([[float(y) for y in x.split()] for x in str_stress])
    else:
        forces = None
        stress = None
        stress_kB = None
    new_structure = read_poscar('CONTCAR')
    # eye() masks the zero diagonal; a pair distance below 0.23 means collapse.
    if np.min(new_structure.distance_matrix() + np.eye(new_structure.natom)) < 0.23:
        print('WARNING: Structure collapse 2 atoms, creating a new random structure')
        new_structure = Structure.random_cell(new_structure.composition)
    if ncalls > max_ncalls:
        # Give up on relaxing this candidate: replace it and reset the counter.
        print('WARNING: Too many calls to MAISE and no relaxation succeeded, replacing structure')
        new_structure = Structure.random_cell(new_structure.composition)
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.ncalls': 0}})
    else:
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.ncalls': ncalls}})
    pcdb.update(entry_id, structure=new_structure)
    if energies is not None and forces is not None and stress is not None:
        # Row 0 holds the initial values, row 1 the final (relaxed) values.
        te = energies[1]
        pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'succeed',
                                                         'status.target_forces': target_forces,
                                                         'properties.initial_forces': generic_serializer(forces[0]),
                                                         'properties.initial_stress': generic_serializer(stress[0]),
                                                         'properties.initial_stress_kB': generic_serializer(stress_kB[0]),
                                                         'properties.forces': generic_serializer(forces[1]),
                                                         'properties.stress': generic_serializer(stress[1]),
                                                         'properties.stress_kB': generic_serializer(stress_kB[1]),
                                                         'properties.energy': te,
                                                         'properties.energy_pa': te / new_structure.natom,
                                                         'properties.energy_pf': te / new_structure.get_composition().gcd}})
    # Rotate every MAISE artifact to a numbered backup so the next run starts clean.
    for ifile in ['POSCAR', 'CONTCAR', 'setup', 'OUTCAR', 'maise.stdout', 'list.dat']:
        if not os.path.exists(ifile):
            wf = open(ifile, 'w')
            wf.write('')
            wf.close()
        n = 1
        while True:
            if os.path.exists(ifile + ('_%03d' % n)):
                n += 1
            else:
                break
        os.rename(ifile, ifile + ('_%03d' % n))
    pcm_log.info('[%s]: Unlocking the entry' % str(entry_id))
    pcdb.unlock(entry_id)
elif option == 'ssl': ssl = True else: print('Unknown option. --' + option) if dbname is None: help_info() sys.exit(1) db_settings = {'name': dbname, 'host': host, 'port': port, 'ssl': ssl} if user is not None: if passwd is None: raise ValueError('Password is mandatory if user is entered') db_settings['user'] = user db_settings['passwd'] = passwd pcdb = pychemia.db.get_database(db_settings) cifs = [x for x in os.listdir(path) if x[-3:] == 'cif'] for i in cifs: pcm_log.info('Reading CIF : %s' % i) structs = cif2structure(path + os.sep + i) pcm_log.info('Number of structures: %d' % len(structs)) for j in structs: if j.is_perfect: pcm_log.info('Composition : %s' % str(j.composition)) pcdb.insert(structure=j) else: pcm_log.info('DISCARDED: Structure is not perfect')
def run_one(self):
    """Execute one cycle of the bee-colony style searcher.

    On the first cycle (``self.scouts_elite is None``) the sorted selection of
    evaluated actives is partitioned into elite scouts (``ne``), best scouts
    (``nb``) and other scouts; foragers are then created around the elite and
    best patches and random members fill the next generation up to
    ``generation_size``. On later cycles the scouts are re-processed against
    the current selection, dead "other" scouts are dropped, the best surviving
    "other" scout may displace the worst "best" scout, and fresh foragers and
    random members are generated for the next generation.
    """
    # Get a static selection of the values in the generation that are relaxed
    selection = self.population.ids_sorted(self.actives_in_generation)
    pcm_log.info('Size of selection : %d' % len(selection))

    if self.scouts_elite is None:
        # First iteration: no scouts assigned yet.
        print('First run', len(selection), self.ns)
        # During the first iteration all the selection are scouts
        # Lets choose from there the elite, the best and the actual
        # scouts for the next iterations
        if len(selection) >= self.ns:
            # Partition the sorted selection: best ne are elite, next nb are best.
            self.scouts_elite = list(selection[:self.ne])
            self.scouts_best = list(selection[self.ne:self.ne + self.nb])
            # NOTE(review): the slice end 'self.ns - self.ne + self.nb' looks
            # suspicious — taking the remaining scouts would normally be
            # selection[self.ne + self.nb:self.ns]. TODO confirm intended.
            self.scouts_others = list(selection[self.ne + self.nb:self.ns - self.ne + self.nb])
            # Disable extra scouts (from an initial population)
            for entry_id in selection[self.ns:]:
                self.population.disable(entry_id)
            self.print_status()
            # Add nre foragers around each elite scout
            for entry_id in self.scouts_elite:
                self.pass_to_new_generation(entry_id, reason='Elite')
                self.create_foragers(entry_id, self.nre)
            self.print_status()
            # Add nrb foragers around each best scout
            for entry_id in self.scouts_best:
                self.pass_to_new_generation(entry_id, reason='Best')
                self.create_foragers(entry_id, self.nrb)
            self.print_status()
            # Top up the next generation with random (disabled) members that
            # become the "other" scouts for the next cycle.
            for u in range(self.generation_size - len(self.get_generation(self.current_generation + 1))):
                ident, origin = self.population.add_random()
                self.population.disable(ident)
                self.generation[ident] = [self.current_generation + 1]
                self.scouts_others.append(ident)
                print('Added to other scouts', ident)
            self.print_status()
        else:
            # Not enough evaluated candidates to seed ns scouts; do nothing
            # this cycle (see the commented-out raise-population idea below).
            pass
            # Number of scouts insufficient, increase their number with more
            # random members
            # for i in range(selection, self.ns):
            #     entry_id=self.population.add_random()
            #     print 'Population raised:',entry_id
    else:
        # New iterations look into each patch and see which bees are on each patch
        print('Foragers %d' % len(self.foragers), self.foragers)
        print('Elite %d' % len(self.scouts_elite), self.scouts_elite)
        print('Best %d' % len(self.scouts_best), self.scouts_best)
        print('Others %d' % len(self.scouts_others), self.scouts_others)
        # Re-evaluate the elite and best patches against the new selection.
        self.process_scouts(self.scouts_elite, selection)
        self.process_scouts(self.scouts_best, selection)
        # Remove "other" scouts no longer present in the selection.
        dead_bees = 0
        for j in list(self.scouts_others):
            if j not in selection:
                # Consider a dead bee
                self.scouts_others.remove(j)
                dead_bees += 1
        scouts_others_alive = self.population.ids_sorted(self.scouts_others)
        if len(scouts_others_alive) > 0:
            # Promote the best surviving "other" scout if it beats the worst
            # member of the "best" group (lower value is better here).
            best_scout = scouts_others_alive[0]
            worst_best = self.population.ids_sorted(self.scouts_best)[-1]
            if self.population.value(best_scout) < self.population.value(worst_best):
                self.scouts_best.remove(worst_best)
                self.scouts_best.append(best_scout)
                self.population.disable(worst_best)
        # Add nre foragers around each elite scout
        for entry_id in self.scouts_elite:
            self.pass_to_new_generation(entry_id, reason='Elite')
            self.foragers[entry_id] = []  # reset the patch's forager list
            self.create_foragers(entry_id, self.nre)
        self.print_status()
        # Add nrb foragers around each best scout
        for entry_id in self.scouts_best:
            self.pass_to_new_generation(entry_id, reason='Best')
            self.foragers[entry_id] = []  # reset the patch's forager list
            self.create_foragers(entry_id, self.nrb)
        self.print_status()
        # "Other" scouts are disabled; they only seed future random searches.
        for i in self.scouts_others:
            self.population.disable(i)
        self.print_status()
        # for i in range(self.ns - self.ne - self.nb):
        # Fill the next generation with random disabled members, as in the
        # first-iteration branch above.
        for u in range(self.generation_size - len(self.get_generation(self.current_generation + 1))):
            ident, origin = self.population.add_random()
            self.population.disable(ident)
            self.generation[ident] = [self.current_generation + 1]
            self.scouts_others.append(ident)
            print('Added to other scouts', ident)
    print('After run_one:')
    self.print_status()
def run(self):
    """Run the global search until a stopping condition is met.

    Loops over generations: enforces the generation size, waits (polling with
    ``sleep_time``) until the whole population is evaluated, refines and
    reports the best candidate, replaces non-evaluated and duplicate entries
    with random members, then runs one searcher cycle and advances the
    generation. Stops when the best candidate survives
    ``stabilization_limit`` generations or when ``target_value`` is reached.

    :return: None
    """
    print(str(self))
    print(str(self.population))
    self.save_info()
    self.population.save_info()
    best_member = ''
    best_recorded = None
    # Fallback survival counter used when lineage data is unavailable for
    # the best candidate (see the else branch below).
    survival_for_best = 0
    while True:
        print('\nGENERATION: %d' % self.current_generation)
        self.print_status(level='DEBUG')
        pcm_log.debug('[%s] Enforcing the size of generation: %d' % (self.searcher_name, self.generation_size))
        self.enforce_generation_size()
        self.update_lineages()
        # Snapshot actives/next-generation so print_status can report changes.
        self.old_actives = self.population.actives
        self.old_nextgen = self.get_generation(self.current_generation + 1)
        self.print_status()
        # Poll until every active member has been evaluated; only log when
        # the evaluated count actually changes to avoid flooding the log.
        number_evaluated = len(self.population.actives_evaluated)
        while self.population.fraction_evaluated < 1.0:
            if len(self.population.actives_evaluated) != number_evaluated:
                pcm_log.debug("Population '%s' still not evaluated. %4.0f %%" %
                              (self.population.name, 100 * self.population.fraction_evaluated))
                self.print_status(level='DEBUG')
                number_evaluated = len(self.population.actives_evaluated)
            self.population.replace_failed()
            time.sleep(self.sleep_time)
        pcm_log.debug("Population '%s' evaluated. %4.0f %%" %
                      (self.population.name, 100 * self.population.fraction_evaluated))
        best_member = self.population.best_candidate
        self.population.refine_progressive(best_member)
        print('Current best candidate: [%s] %s' % (best_member, self.population.str_entry(best_member)))
        if best_member in self.get_generation():
            # Survival measured by the number of generations recorded for
            # this candidate; stop once it is stable long enough.
            print('This candidate have survived for %d generations' % len(self.generation[best_member]))
            if len(self.generation[best_member]) >= self.stabilization_limit:
                self.save_generations()
                break
        else:
            pcm_log.debug('Best candidate %s is not in the current generation' % best_member)
            # pcm_log.debug('Slot: %s' % self.lineage_inv[best_member])
            # pcm_log.debug('Lineage: %s' % self.lineage[self.lineage_inv[best_member]])
            # Track survival explicitly when the candidate is outside the
            # current generation: reset the counter whenever the best changes.
            if best_member != best_recorded:
                survival_for_best = 0
                best_recorded = best_member
            else:
                survival_for_best += 1
            if survival_for_best >= self.stabilization_limit:
                self.save_generations()
                break
        # Optional early stop when the objective reaches the target value
        # (lower is better here).
        if self.target_value is not None:
            if self.population.value(best_member) <= self.target_value:
                print('Target value achieved: target=%9.3f best=%9.3f' %
                      (self.population.value(best_member), self.target_value))
                self.save_generations()
                break
            else:
                print('Best value = %7.3f target value = %7.3f' %
                      (self.population.value(best_member), self.target_value))
        # Replace members that never got evaluated with random ones.
        pcm_log.debug('[%s] Removing not evaluated: %d' %
                      (self.searcher_name, len(self.population.actives_no_evaluated)))
        for entry_id in self.population.actives_no_evaluated:
            self.replace_by_random(entry_id, reason='no evaluated')
        self.print_status()
        # Detect duplicates among the evaluated actives, record the change
        # and replace each duplicate with a random member.
        duplicates = self.population.check_duplicates(self.population.ids_sorted(self.population.actives_evaluated))
        for entry_id in duplicates:
            change = {'change': 'duplicate', 'to': duplicates[entry_id], 'reason': None}
            self.write_change(entry_id, change)
            self.replace_by_random(entry_id, reason='duplicate')
        pcm_log.info(' Duplicates identified and disabled: %d' % len(duplicates))
        self.print_status(level='INFO')
        pcm_log.info(' Running one cycle for %s with %d candidates' %
                     (self.searcher_name, len(self.actives_in_generation)))
        # One cycle of the concrete search algorithm, then advance generation.
        self.run_one()
        self.update_generation()
    print('Searcher ended after %d iterations' % self.current_generation)
    print('Best candidate: [%s] %s' % (best_member, self.population.str_entry(best_member)))
def run(self):
    """Drive a DFTB+ structural relaxation to the target forces.

    Sets up a DFTB+ calculation in ``self.workdir`` and repeatedly launches
    the code, polling the external process. After each completed run the
    relaxation status is scored (``self.quality``); while the score is
    negative another relaxation step is launched (optionally perturbing or
    randomizing the structure when the score bottoms out and ``self.forced``
    is set). Once the score is non-negative a final static force evaluation
    is performed, with one relaxation retry and then a structure
    randomization if forces/stress/energy cannot be read.
    """
    irun = 0
    score = INITIAL_SCORE
    dftb = DFTBplus()
    dftb.initialize(workdir=self.workdir, structure=self.structure, kpoints=self.kpoints)
    dftb.set_slater_koster(search_paths=self.slater_path)
    dftb.basic_input()
    dftb.driver['LatticeOpt'] = False
    # Decreasing the target_forces to avoid the final static
    # calculation of raising too much the forces after symmetrization
    dftb.driver['MaxForceComponent'] = self.target_forces
    dftb.driver['ConvergentForcesOnly'] = True
    dftb.driver['MaxSteps'] = 100
    dftb.hamiltonian['MaxSCCIterations'] = 20
    dftb.set_inputs()
    print('Launching DFTB+ with target force of %9.2E ' % dftb.driver['MaxForceComponent'])
    dftb.run()
    if self.waiting:
        dftb.runner.wait()
    while True:
        # poll() is not None means the external DFTB+ process has finished.
        if dftb.runner is not None and dftb.runner.poll() is not None:
            pcm_log.info('Execution completed. Return code %d' % dftb.runner.returncode)
            stdo = read_dftb_stdout(filename=self.workdir + os.sep + 'dftb_stdout.log')
            good_forces, good_stress = self.relaxation_status()
            if 'max_force' in stdo:
                print('Converged: %s\t Max Force: %9.3e\t MaxForceComponent: %9.3e' %
                      (stdo['ion_convergence'], stdo['max_force'], self.target_forces))
            filename = dftb.workdir + os.sep + 'detailed.out'
            if not os.path.exists(filename):
                # Without detailed.out there is nothing to analyze; give up.
                pcm_log.error('Could not find ' + filename)
                break
            if not good_forces and not good_stress:
                # This happens when all the SCC are completed without convergence
                dftb.driver['ConvergentForcesOnly'] = False
            else:
                dftb.driver['ConvergentForcesOnly'] = True
            score = self.quality(score)
            pcm_log.debug('Score : %d Good Forces: %s Good Stress: %s' % (score, good_forces, good_stress))
            if score < 0:
                # Negative score: relaxation not yet acceptable, set up the
                # next relaxation run according to what already converged.
                if good_forces and good_stress:
                    pcm_log.debug('Convergence: Internals + Cell')
                    dftb.driver['MovedAtoms'] = '1:-1'
                    dftb.driver['LatticeOpt'] = True
                elif not good_forces and good_stress:
                    pcm_log.debug('Convergence: Internals')
                    dftb.driver['LatticeOpt'] = False
                    dftb.driver['MovedAtoms'] = '1:-1'
                elif good_forces and not good_stress:
                    pcm_log.debug('Convergence: Internals + Cell')
                    dftb.driver['LatticeOpt'] = True
                    dftb.driver['MovedAtoms'] = '1:-1'
                # Continue from the geometry DFTB+ ended with.
                dftb.structure = read_geometry_gen(dftb.workdir + os.sep + 'geo_end.gen')
                # lets change the positions if the score have lowered to -10
                if score == -10 and self.forced:
                    # Random jitter in [-0.1, 0.1) per coordinate plus a 10%
                    # cell expansion to escape a stuck relaxation.
                    dftb.structure.positions += 0.2 * np.random.rand(dftb.structure.natom, 3) - 0.1
                    dftb.structure.positions2reduced()
                    dftb.structure.set_cell(1.1 * dftb.structure.cell)
                if score == -1 and self.forced:
                    # Last resort: replace with a fresh random cell and reset
                    # the score.
                    dftb.structure = dftb.structure.random_cell(dftb.structure.composition)
                    print('RANDOM STRUCTURE')
                    print(dftb.structure)
                    score = INITIAL_SCORE
                dftb.structure.save_json(dftb.workdir + os.sep + 'structure_current.json')
                if self.symmetrize:
                    dftb.structure = symmetrize(dftb.structure)
                self.structure = dftb.structure
                dftb.get_geometry()
                dftb.roll_outputs(irun)
                dftb.set_inputs()
                irun += 1
                print('Launching DFTB+ with target force of %9.2E ' % dftb.driver['MaxForceComponent'])
                dftb.run()
                if self.waiting:
                    dftb.runner.wait()
            else:
                # Non-negative score: relaxation accepted, do a single-point
                # run to evaluate forces on the final geometry.
                pcm_log.debug('Final static calculation')
                dftb.structure = self.get_final_geometry()
                dftb.structure.save_json(dftb.workdir + os.sep + 'structure_final.json')
                if self.symmetrize:
                    dftb.structure = symmetrize(dftb.structure)
                self.structure = dftb.structure
                dftb.get_geometry()
                dftb.roll_outputs(irun)
                dftb.options['CalculateForces'] = True
                dftb.driver = {}  # empty driver => static (no relaxation) run
                dftb.set_inputs()
                print('Launching DFTB+ with static evaluation of forces ')
                dftb.run()
                if self.waiting:
                    dftb.runner.wait()
                while dftb.runner.poll() is None:
                    dftb.run_status()
                    time.sleep(10)
                print('Completed Static run')
                forces, stress, total_energy = self.get_forces_stress_energy()
                if stress is None or forces is None or total_energy is None:
                    # Static run produced no usable results: retry with a
                    # short, slightly tighter relaxation before exiting.
                    pcm_log.debug('Null Forces, Stress or Energy, relaxing and exiting')
                    dftb.basic_input()
                    dftb.driver['LatticeOpt'] = False
                    # Decreasing the target_forces to avoid the final static
                    # calculation of raising too much the forces after symmetrization
                    dftb.driver['MaxForceComponent'] = 0.9 * self.target_forces
                    dftb.driver['ConvergentForcesOnly'] = False
                    dftb.driver['MaxSteps'] = 10
                    dftb.hamiltonian['MaxSCCIterations'] = 50
                    print(dftb.driver)
                    dftb.set_inputs()
                    dftb.run()
                    if self.waiting:
                        dftb.runner.wait()
                    while dftb.runner.poll() is None:
                        time.sleep(10)
                    print('FINAL:', read_detailed_out(filename=filename))
                    forces, stress, total_energy = self.get_forces_stress_energy()
                    if stress is None or forces is None or total_energy is None:
                        # Still nothing usable: randomize the structure and
                        # let the outer loop start over.
                        pcm_log.debug('Again Null Forces, Stress or Energy, Randomizing Structure')
                        dftb.structure = dftb.structure.random_cell(dftb.structure.composition)
                        print('RANDOM STRUCTURE')
                        print(dftb.structure)
                        score = INITIAL_SCORE
                    else:
                        break
                else:
                    break
        else:
            # Process still running: report the energy trajectory so far,
            # then wait before polling again.
            pcm_log.debug('ID: %s' % os.path.basename(self.workdir))
            filename = dftb.workdir + os.sep + 'dftb_stdout.log'
            if os.path.exists(filename):
                stdo = read_dftb_stdout(filename=filename)
                print('Number of steps:', len(stdo['Geometry_Steps']))
                if len(stdo['Geometry_Steps']) > 1:
                    # Build a compact '>'/'<' string showing whether the total
                    # energy decreased or increased at each geometry step.
                    line = 'Energy behavior: '
                    prev_energy = stdo['Geometry_Steps'][0]['Total Energy']['value']
                    line += ' %7.3f ' % prev_energy
                    for step in stdo['Geometry_Steps'][1:]:
                        new_energy = step['Total Energy']['value']
                        if prev_energy > new_energy:
                            line += '>'
                        else:
                            line += '<'
                        prev_energy = new_energy
                    finene = stdo['Geometry_Steps'][-1]['Total Energy']['value']
                    line += ' %7.3f' % finene
                    print(line)
            time.sleep(10)