def get_restart_population(Optimizer):
    """Function to generate a population from a folder containing existing structures.
    Inputs:
        Optimizer = structopt_stem Optimizer class object
    Outputs:
        pop = List of structopt_stem Individual class objects containing existing structures.
    """
    logger = logging.getLogger(Optimizer.loggername)
    index1 = 0
    Optimizer.output.write('Loading structures from old run\n')
    pop = []
    for i in range(Optimizer.nindiv):
        logger.info('reading structure {0}'.format(Optimizer.files[i].name))
        successflag = False
        try:
            indiv = read_xyz(Optimizer.files[i].name)
            successflag = True
        except IOError:
            logger.error('Not enough files in restart to generate population. '
                         'Resetting nindiv to {0}'.format(i - 1), exc_info=True)
            Optimizer.output.write('WARNING: Not enough files in restart to generate population\n')
            Optimizer.nindiv = i - 1
            Optimizer.output.write('Resetting nindiv = {0}\n'.format(Optimizer.nindiv))
            Optimizer.output.flush()
            break
        except Exception as e:
            # Log the traceback through the logger; the output file only takes plain text
            logger.error('Trouble reading file: {0}'.format(Optimizer.files[i].name), exc_info=True)
            Optimizer.output.write('WARNING: Trouble reading file: {0}\n'.format(Optimizer.files[i].name))
            Optimizer.output.write('Error: {0}\n'.format(e))
            Optimizer.output.flush()
            Optimizer.nindiv -= 1
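# --- Hedged usage sketch (not part of the original module) -------------------
# Rebuilding a population from a previous run. Assumes an Optimizer object can
# be constructed from a restart input file; 'restart.inp' is a hypothetical
# filename used only for illustration.
def _example_restart_population(inputfile='restart.inp'):
    """Illustrative only: load an old run and return its population."""
    opt = Optimizer(inputfile)               # Optimizer class assumed available here
    pop = get_restart_population(opt)
    # pop may be shorter than opt.nindiv if some restart files were missing
    return pop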
def get_crystal_restart_indiv(Optimizer, indiv):
    """Function to generate a structopt Individual class object containing
    a crystal structure from a previously existing structure.
    Inputs:
        Optimizer = structopt Optimizer class
        indiv = ASE Atoms object containing the previously existing structure
    Outputs:
        individ = structopt Individual class object containing crystal structure data
    *** WARNING: This function is currently degenerate! ***
    """
    crys = read_xyz(Optimizer.crysfile)
    # Recover cell from Structure Summary file
    # f = open(Optimizer.files[-1], 'r')
    # sline = f.readline()
    # lines = f.readlines()
    # popbygen = []
    # n = 0
    # for line in lines:
    #     if 'Generation' in line:
    #         if len(popbygen) != 0:
    #             popbygen.append(genlist)
    #         genlist = []
    #     else:
    #         genlist.append(line)
    # f.close()
    cells = Optimizer.cryscell
    crys.set_cell(cells)
    individ = Individual(crys)
    return individ
def gen_solid(solidfile, solidcell, outfilename, calc=False, calcmeth=None):
    """Function to load a bulk solid from a file for use in Defect structure optimization.
    Inputs:
        solidfile = String of filename to load
        solidcell = List/Matrix of cell parameters for ASE Atoms class
        outfilename = String of filename to write solid
        calc = False/calculator object for evaluating energy of solid
        calcmeth = 'VASP' or other method for calculating the energy of the solid
    Outputs:
        solid as ASE Atoms class
        energy and string if calc is not false
    """
    try:
        sol = read_xyz(solidfile)
    except Exception as e1:
        try:
            sol = read(solidfile)
        except Exception as e2:
            raise RuntimeError('Encountered error: ' + repr(e1) + ' ' + repr(e2) +
                               ' while trying to read solid file given as: ' + repr(solidfile))
    cell = solidcell
    sol.set_cell(cell)
    sol.set_pbc(True)
    # Evaluate pure bulk structure
    if calc:
        cwd = os.getcwd()
        sol.set_calculator(calc)
        stro = ''
        try:
            if calcmeth == 'VASP':
                en = sol.get_potential_energy()
                calcb = Vasp(restart=True)
                sol = calcb.get_atoms()
                PureBulkEnpa = en / sol.get_number_of_atoms()
            else:
                OUT = sol.calc.calculate(sol)
                PureBulkEnpa = OUT['thermo'][-1]['pe'] / sol.get_number_of_atoms()
                sol = OUT['atoms']
                sol.set_pbc(True)
        except Exception:
            stro = 'WARNING: Unable to calculate energy of pure bulk solid'
            PureBulkEnpa = 0
        os.chdir(cwd)
        # Write bulk file to directory
        write_xyz(outfilename, sol, PureBulkEnpa)
        return sol, PureBulkEnpa, stro
    else:
        # Write bulk file to directory
        write_xyz(outfilename, sol, 'Pure Bulk')
        return sol
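# --- Hedged usage sketch (not part of the original module) -------------------
# Loading a bulk solid without an energy evaluation (calc=False). The filename
# 'bulk.xyz', the output name 'Bulkfile.xyz', and the 10 Angstrom cubic cell are
# hypothetical example values.
def _example_gen_solid():
    cell = [[10.0, 0.0, 0.0],
            [0.0, 10.0, 0.0],
            [0.0, 0.0, 10.0]]
    # With calc=False only the Atoms object is returned and written to file
    sol = gen_solid('bulk.xyz', cell, 'Bulkfile.xyz')
    return sol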
def get_surface_restart_indiv(Optimizer, indiv):
    """Function to generate a structopt Individual class object containing
    a surface structure from a previously existing structure.
    Inputs:
        Optimizer = structopt Optimizer class
        indiv = ASE Atoms object containing the previously existing structure
    Outputs:
        individ = structopt Individual class object containing surface structure data
    """
    # Load surface structure
    surfs = read_xyz(Optimizer.surfacefile)
    cells = Optimizer.surfacecell
    surfs.set_cell(cells)
    surfs.set_pbc([True, True, False])
    # Split the restart structure into a top layer and underlying bulk
    top, bulks = find_top_layer(indiv, Optimizer.surftopthick)
    individ = Individual(top)
    individ.bulki = bulks.copy()
    individ.bulko = bulks.copy()
    return individ
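# --- Hedged usage sketch (not part of the original module) -------------------
# Rebuilding a surface Individual from a structure written by an earlier run.
# 'surface_run.inp' and 'indiv00.xyz' are hypothetical filenames.
def _example_surface_restart():
    opt = Optimizer('surface_run.inp')       # Optimizer class assumed available here
    old = read_xyz('indiv00.xyz')
    return get_surface_restart_indiv(opt, old)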
def get_surface_indiv(Optimizer):
    """Function to generate a structopt Individual class object containing a surface structure.
    Inputs:
        Optimizer = structopt Optimizer class
    Outputs:
        individ = structopt Individual class object containing surface structure data
    """
    # Load surface structure
    surfs = read_xyz(Optimizer.surfacefile)
    cells = Optimizer.surfacecell
    surfs.set_cell(cells)
    surfs.set_pbc([True, True, False])
    # Find top layer
    top, bulks = find_top_layer(surfs, Optimizer.surftopthick)
    # Randomize the top layer with a lattice-alteration move, then restore
    # the optimizer's original mutation options
    mutopto = Optimizer.mutation_options
    Optimizer.mutation_options = ['lattice_alteration_rdrd']
    topind = top.copy()
    ind = moves_switch(topind, Optimizer)
    Optimizer.mutation_options = mutopto
    individ = Individual(ind)
    individ.bulki = bulks.copy()
    individ.bulko = bulks.copy()
    return individ
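# --- Hedged usage sketch (not part of the original module) -------------------
# Generating a fresh surface population of Optimizer.nindiv individuals.
# 'surface_run.inp' is a hypothetical input filename.
def _example_surface_population():
    opt = Optimizer('surface_run.inp')       # Optimizer class assumed available here
    return [get_surface_indiv(opt) for _ in range(opt.nindiv)]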
def get_lattice_concentration(bulkfile, indivfile):
    """Function to identify the lattice concentration of atoms in a bulk structure
    compared to a structure with a defect.
    Inputs:
        bulkfile = filename for starting structure with original lattice atoms
        indivfile = filename for structure to compare
    Outputs:
        File: LatticeConcentration.txt in working directory. Includes summary of
        concentration of each atom type and vacancies
    ** Note: Currently limited to cubic structures **
    """
    # Load bulk solid file
    solid = read_xyz(bulkfile)
    # Get lattice sites for bulk
    # Identify nearest-neighbor distance from the first 20 atoms
    solid.set_pbc(True)
    distmin = []
    for i in range(20):
        dist = []
        for j in range(len(solid)):
            if i != j:
                d = calc_dist(solid[i], solid[j])
                dist.append(d)
        distmin.append(min(dist))
    nndist = sum([one for one, x, y, z in distmin]) / len(distmin)
    nnxd = sum([x for one, x, y, z in distmin]) / len(distmin)
    nnyd = sum([y for one, x, y, z in distmin]) / len(distmin)
    nnzd = sum([z for one, x, y, z in distmin]) / len(distmin)
    solid.translate([nnxd / 2.0, nnyd / 2.0, nnzd / 2.0])
    # Get size of cell for pbc
    cell = numpy.maximum.reduce(solid.get_positions())
    cell += [nnxd / 2.0, nnyd / 2.0, nnzd / 2.0]
    # Initialize boxes
    nx = int(math.ceil(float(cell[0]) / float(nnxd)))
    ny = int(math.ceil(float(cell[1]) / float(nnyd)))
    nz = int(math.ceil(float(cell[2]) / float(nnzd)))
    bxarray0 = [[0, []] for i in range(nx * ny * nz)]
    # Identify which atoms are in which box
    positions = solid.get_positions()
    for i in range(len(solid)):
        box = [math.floor(positions[i][0] / nnxd),
               math.floor(positions[i][1] / nnyd),
               math.floor(positions[i][2] / nnzd)]
        bxarray0[int((nx * ny) * box[2] + nx * box[1] + box[0])][0] += 1
        bxarray0[int((nx * ny) * box[2] + nx * box[1] + box[0])][1] += [solid[i].symbol]
    # Get types of atoms for bulk and bulk lattice concentration
    nlatsites = len(solid)
    syms = list(set([atm.symbol for atm in solid]))
    nsyms = []
    for one in syms:
        numberofsym = len([atm for atm in solid if atm.symbol == one])
        nsyms.append(float(numberofsym) / float(nlatsites))
    concentbulk = list(zip(syms, nsyms))
    # Get lattice sites for individual
    onlatcon = [[-1, concentbulk]]
    n = 0
    while True:
        try:
            indiv = read_xyz(indivfile, n)
        except Exception:
            break
        indiv.translate([nnxd / 2.0, nnyd / 2.0, nnzd / 2.0])
        bxarray = [[0, [], []] for i in range(nx * ny * nz)]
        positions = indiv.get_positions()
        # Wrap positions in individual to cell size
        for i in range(len(positions)):
            while positions[i][0] > cell[0]:
                positions[i][0] -= cell[0]
            while positions[i][1] > cell[1]:
                positions[i][1] -= cell[1]
            while positions[i][2] > cell[2]:
                positions[i][2] -= cell[2]
            while positions[i][0] < 0:
                positions[i][0] += cell[0]
            while positions[i][1] < 0:
                positions[i][1] += cell[1]
            while positions[i][2] < 0:
                positions[i][2] += cell[2]
        for i in range(len(indiv)):
            box = [math.floor(positions[i][0] / nnxd),
                   math.floor(positions[i][1] / nnyd),
                   math.floor(positions[i][2] / nnzd)]
            bxarray[int((nx * ny) * box[2] + nx * box[1] + box[0])][0] += 1
            bxarray[int((nx * ny) * box[2] + nx * box[1] + box[0])][1] += [indiv[i].symbol]
            bxarray[int((nx * ny) * box[2] + nx * box[1] + box[0])][2] += [i]
        # Get on-lattice concentration
        latsyms = []
        for i in range(len(bxarray0)):
            if bxarray0[i][0] != 0:
                if len(bxarray[i][1]) == 1:
                    latsyms.extend(bxarray[i][1])
                elif len(bxarray[i][1]) > 1:
                    # Count the site as its lattice species if the bulk symbol
                    # for this box appears among the atoms found there
                    if bxarray0[i][1][0] in bxarray[i][1]:
                        latsyms.extend(bxarray0[i][1])
                else:
                    latsyms.append('Vacancy')
        reducedsyms = list(set(latsyms))
        concenti = []
        for one in reducedsyms:
            numberofsym = len([atm for atm in latsyms if atm == one])
            concenti.append(float(numberofsym) / float(nlatsites))
        concents = list(zip(reducedsyms, concenti))
        onlatcon.append([n, concents])
        n += 1
    # Write the per-generation concentration summary
    maxlen = max([len(con) for n, con in onlatcon])
    maxsyms = [con for n, con in onlatcon if len(con) == maxlen]
    symlist = [sym for sym, n in maxsyms[0]]
    output = open('LatticeConcentration.txt', 'a')
    output.write('Generation ')
    for one in symlist:
        output.write(repr(one) + ' ')
    output.write('TotalSites \n')
    for n, con in onlatcon:
        output.write(repr(n) + ' ')
        for sym in symlist:
            num = [count for atm, count in con if atm == sym]
            num = sum(num)
            output.write(repr(num) + ' ')
        output.write(repr(nlatsites) + '\n')
    output.close()
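# --- Hedged usage sketch (not part of the original module) -------------------
# Comparing a defect-structure file against its bulk reference. 'Bulkfile.xyz'
# and 'indiv00.xyz' are hypothetical filenames; the summary is appended to
# LatticeConcentration.txt in the working directory.
def _example_lattice_concentration():
    get_lattice_concentration('Bulkfile.xyz', 'indiv00.xyz')
    with open('LatticeConcentration.txt') as f:
        print(f.read())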
def __init__(self, input, uselogger=True):
    if input:
        parameters = inp_out.read_parameter_input(input, uselogger)
    else:
        parameters = inp_out.read_parameter_input(
            {'atomlist': [('Xx', 1, 0, 0)], 'structure': 'Cluster'}, uselogger)
    self.__dict__.update(parameters)
    try:
        rank = MPI.COMM_WORLD.Get_rank()
    except Exception:
        rank = 0
    if 'stem' in parameters['fitness_scheme']:
        if rank == 0:
            nk = self.stemcalc.parameters['Pixels']
            self.stemcalc.psf = np.empty([nk, nk], dtype=float)  # ZS
            try:
                fileobj = open(parameters['psf'], 'r')
            except KeyError:
                fileobj = open('/home/usitguest/USIT/dropbox_app/PSF.txt', 'r')
            lines = fileobj.readlines()
            for x in range(0, nk):
                self.stemcalc.psf[x] = lines[x].split()
            fileobj.close()
            #self.stemcalc.psf = tools.StemCalc.get_probe_function(self.stemcalc.parameters)
        else:
            self.stemcalc.psf = None
        self.stemcalc.psf = MPI.COMM_WORLD.bcast(self.stemcalc.psf, root=0)
    if self.loggername:
        global logger
        logger = logging.getLogger(self.loggername)
    if self.restart_optimizer:
        try:
            rank = MPI.COMM_WORLD.Get_rank()
        except Exception:
            rank = 0
        if rank == 0:
            logger.info('restarting output')
            outdict = inp_out.restart_output(self)
            self.__dict__.update(outdict)
            logger.info('Loading individual files')
            poplist = []
            for indfile in self.population:
                ind = inp_out.read_individual(indfile)
                poplist.append(ind)
            self.population = poplist
            logger.info('Loading bests')
            bestlist = []
            for bestfile in self.BESTS:
                ind = inp_out.read_individual(bestfile)
                bestlist.append(ind)
            self.BESTS = bestlist
            self.restart = True
            if self.structure == 'Defect':
                bulk = inp_out.read_xyz(self.solidfile)
                bulk.set_pbc(True)
                bulk.set_cell(self.solidcell)
                self.solidbulk = bulk.copy()
            else:
                self.solidbulk = None
    else:
        self.convergence = False
        self.generation = 0
        self.Runtimes = [time.time()]
        self.Evaluations = list()
        self.CXs = list()
        self.Muts = list()
        self.cxattempts = 0
        self.mutattempts = list()
        self.BESTS = list()
        self.genrep = 0
        self.minfit = 0
        self.convergence = False
        self.overrideconvergence = False
        self.population = list()
        self.calc = None
        self.static_calc = None
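# --- Hedged usage sketch (not part of the original module) -------------------
# Constructing an optimizer either from an input file or from an in-memory
# parameter dictionary. 'cluster_run.inp' is a hypothetical filename and the
# dictionary mirrors the default used in __init__ above; the class name
# Optimizer is assumed from the surrounding codebase.
def _example_optimizer_setup(inputfile=None):
    if inputfile is not None:
        return Optimizer(inputfile)          # e.g. Optimizer('cluster_run.inp')
    return Optimizer({'atomlist': [('Xx', 1, 0, 0)], 'structure': 'Cluster'})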
aber = [[0, 0], [0, 0], [22.56, -20.1], [22.08, -7.5], [0.1198, 0], [0.9018, -170.1],
        [0.04964, 20.9], [28.43, -120.6], [11.84, 153.8], [8.456, 76.1], [0.622, 0],
        [2.811, -125.5]]
autostemparameters = {'Electron energy': 200,
                      'Spherical aberration': 1.4,
                      'Defocus': 0,
                      'Aperture semiangle': 24.5,
                      'Source size': 0.882,
                      'Slice size': 25.0,
                      'Pixels': 976,
                      'Chromatic aberration Coefficient': 1.4,
                      'Delta E': 0.73,
                      'aber': aber,
                      'Scale Factor': 0.00570113}

# Calculate reference STEM image
'''
fp = open('../archive_input_options.txt')
data = fp.readlines()
for line in range(len(data)):
    if "'psf':" in data[line]:
        parameters['psf'] = ast.literal_eval(data[line].strip())['psf']
    if "'stem_ref':" in data[line]:
        parameters['stem_ref'] = ast.literal_eval(data[line].strip())['stem_ref']
'''
A = ConvStem(parameters=autostemparameters, calc_exp=False)
try:
    atoms_ref = inp_out.read_xyz(parameters['stem_ref'], 0)
except KeyError:
    atoms_ref = inp_out.read_xyz('/home/usitguest/USIT/dropbox_app/STEM_ref', 0)

nk = autostemparameters['Pixels']
A.psf = np.empty([nk, nk], dtype=float)
try:
    fileobj = open(parameters['psf'], 'r')
except KeyError:
    fileobj = open('/home/usitguest/USIT/dropbox_app/PSF.txt', 'r')
lines = fileobj.readlines()
for x in range(0, nk):
    A.psf[x] = lines[x].split()
fileobj.close()

STEM_ref = A.get_image(A.psf, atoms_ref, autostemparameters['Slice size'],
                       autostemparameters['Pixels'])
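# --- Hedged usage sketch (not part of the original script) -------------------
# Comparing a candidate structure's simulated image to STEM_ref with a simple
# sum-of-squared-differences score. 'candidate.xyz' is a hypothetical filename
# and this metric is illustrative, not the fitness scheme used by structopt_stem.
def _example_stem_score(candidatefile='candidate.xyz'):
    atoms = inp_out.read_xyz(candidatefile, 0)
    img = A.get_image(A.psf, atoms, autostemparameters['Slice size'],
                      autostemparameters['Pixels'])
    return float(np.sum((img - STEM_ref) ** 2))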