def submit_job(self,job):
    pad = self.enter(job.directory,msg=job.internal_id)
    if job.subfile==None:
        self.error('submission file not specified for job')
    elif not os.path.exists(job.subfile):
        self.error('job submission file was not written prior to submission\n submission file: '+os.path.join(job.directory,job.subfile))
    #end if
    command = self.sub_launcher+' '+job.subfile
    if self.generate_only:
        self.log(pad+'Would have executed: '+command)
        job.status = job.states.running
        process = obj()
        process.job = job
        self.processes[job.internal_id] = process
    else:
        self.log(pad+'Executing: '+command)
        job.status = job.states.running
        process = obj()
        process.job = job
        out,err = Popen(command,shell=True,stdout=PIPE,stderr=PIPE,close_fds=True).communicate()
        output = out+'\n'+err
        pid = self.read_process_id(output)
        if pid is None:
            self.error('process id could not be determined from submission output\n output:\n'+output)
        #end if
        #pid = 'fakepid_'+str(job.internal_id)
        job.system_id = pid
        self.processes[pid] = process
    #end if
    self.leave()
def read_outcar_charge_mag(vlines,odata,token):
    ion   = obj(s=[],p=[],d=[],tot=[])
    total = obj()
    vlines.advance_token(token)
    vlines.advance(4)
    prev_end = False
    n = 0
    for line in vlines.remainder():
        n+=1
        if prev_end:
            break
        #end if
        if line[0]=='-':
            prev_end = True
        else:
            vals = array(line.split()[1:],dtype=float)
            ion.s.append(vals[0])
            ion.p.append(vals[1])
            ion.d.append(vals[2])
            ion.tot.append(vals[3])
        #end if
    #end for
    for channel,vals in ion.iteritems():
        ion[channel] = array(vals,dtype=float)
    #end for
    vlines.advance(n)
    vals = array(line.split()[1:],dtype=float)
    total.s   = vals[0]
    total.p   = vals[1]
    total.d   = vals[2]
    total.tot = vals[3]
    return ion,total
def post_process_read(self,parent):
    if 'atomic_species' in parent:
        keys = self.keys()
        for alias,name in self.atomic_variables.iteritems():
            has_var = False
            avals = obj()
            akeys = []
            for key in keys:
                if key.startswith(name):
                    has_var = True
                    akeys.append(key)
                    index = int(key.replace(name,'').strip('()'))
                    avals[index] = self[key]
                #end if
            #end for
            if has_var:
                for key in akeys:
                    del self[key]
                #end for
                atoms = parent.atomic_species.atoms
                value = obj()
                for i in range(len(atoms)):
                    index = i+1
                    if index in avals:
                        value[atoms[i]] = avals[index]
                    #end if
                #end for
                self[alias] = value
            #end if
        #end for
    #end if
def class_init(cls):
    for kw_field in cls.kw_fields:
        if not kw_field in cls.__dict__:
            cls.__dict__[kw_field] = set()
        #end if
    #end for
    #cls.check_consistency()
    cls.scalar_keywords = set()
    for scalar_field in cls.kw_scalars:
        cls.scalar_keywords |= cls.__dict__[scalar_field]
    #end for
    cls.array_keywords = set()
    for array_field in cls.kw_arrays:
        cls.array_keywords |= cls.__dict__[array_field]
    #end for
    cls.keywords = cls.scalar_keywords | cls.array_keywords
    cls.type         = obj()
    cls.read_value   = obj()
    cls.write_value  = obj()
    cls.assign_value = obj()
    for type in cls.kw_scalars + cls.kw_arrays:
        for name in cls.__dict__[type]:
            cls.type[name]         = type
            cls.read_value[name]   = read_value_functions[type]
            cls.write_value[name]  = write_value_functions[type]
            cls.assign_value[name] = assign_value_functions[type]
        #end for
    #end for
def get_result(self,result_name,sim):
    result = obj()
    input = self.input
    control = input.control
    prefix = 'pwscf'
    outdir = './'
    if 'prefix' in control:
        prefix = control.prefix
    #end if
    if 'outdir' in control:
        outdir = control.outdir
    #end if
    if outdir.startswith('./'):
        outdir = outdir[2:]
    #end if
    if result_name=='charge_density':
        result.location = os.path.join(self.locdir,outdir,prefix+'.save','charge-density.dat')
    elif result_name=='orbitals':
        result.location = os.path.join(self.locdir,outdir,prefix+'.wfc1')
    elif result_name=='structure':
        pa = self.load_analyzer_image()
        structs = pa.structures
        pos,atoms = structs[len(structs)-1].tuple('positions','atoms')
        scale = self.input.system['celldm(1)']
        pos   = scale*array(pos)
        atoms = array(atoms)
        result.structure = obj(
            positions = pos,
            atoms     = atoms
            )
    else:
        self.error('ability to get result '+result_name+' has not been implemented')
    #end if
    return result
def __init__(self):
    modes = nexus_core.modes
    self.persistent_modes = set([modes.submit,modes.all])
    self.simulations = obj()
    self.cascades = obj()
    self.progressing_cascades = obj()
    self.operations = []
def pot_info(self):
    pot_info = obj()
    if len(self.pseudos)>0:
        pots = self.pseudos
    elif self.filepath!=None and self.files!=None:
        pots = obj()
        for file in self.files:
            pots.append(open(os.path.join(self.filepath,file),'r').read())
        #end for
    else:
        pots = obj()
    #end if
    for i in range(len(pots)):
        pot = pots[i]
        n1 = pot.find('\n')
        n2 = pot.find('\n',n1+1)
        Zval = int(float(pot[n1:n2].strip()))
        n  = pot.find('VRHFIN')
        n1 = pot.find('=',n+1)+1
        n2 = pot.find(':',n1+1)
        element = pot[n1:n2].strip()
        pot_info.append(obj(Zval=Zval,element=element))
    #end for
    return pot_info
def get_result(self, result_name, sim):
    result = obj()
    input = self.input
    control = input.control
    prefix = "pwscf"
    outdir = "./"
    if "prefix" in control:
        prefix = control.prefix
    # end if
    if "outdir" in control:
        outdir = control.outdir
    # end if
    if outdir.startswith("./"):
        outdir = outdir[2:]
    # end if
    if result_name == "charge_density":
        result.location = os.path.join(self.locdir, outdir, prefix + ".save", "charge-density.dat")
        result.spin_location = os.path.join(self.locdir, outdir, prefix + ".save", "spin-polarization.dat")
    elif result_name == "orbitals":
        result.location = os.path.join(self.locdir, outdir, prefix + ".wfc1")
    elif result_name == "structure":
        pa = self.load_analyzer_image()
        structs = pa.structures
        pos, atoms = structs[len(structs) - 1].tuple("positions", "atoms")
        scale = self.input.system["celldm(1)"]
        pos = scale * array(pos)
        atoms = array(atoms)
        result.structure = obj(positions=pos, atoms=atoms)
    else:
        self.error("ability to get result " + result_name + " has not been implemented")
    # end if
    return result
def readfile(self,filepath):
    text = TextFile(filepath)
    Zatom,Z = text.read_tokens('Atomic number and pseudo-charge',int,float)
    if Zatom>len(pt.simple_elements):
        self.error('element {0} is not in the periodic table'.format(Zatom))
    #end if
    element = pt.simple_elements[Zatom].symbol
    units = text.read_tokens('Energy units',str)
    if not units in self.unitmap:
        self.error('units {0} unrecognized from casino PP file {1}'.format(units,filepath))
    #end if
    lloc  = text.read_tokens('Angular momentum of local component',int)
    ngrid = text.read_tokens('Number of grid points',int)
    i = text.find_line('R(i)',exit=True)+1
    r = empty((ngrid,),dtype=float)
    for ir in xrange(ngrid):
        r[ir] = float(text.lines[i])
        i+=1
    #end for
    r = r[1:]
    p = obj()
    while i<len(text.lines):
        line = text.lines[i]
        if 'potential' in line:
            eqloc = line.find('=')
            if eqloc==-1:
                self.error('"=" not found in potential line')
            #end if
            l = int(line[eqloc+1])
            i+=1
            if i+ngrid>len(text.lines):
                self.error('potentials in file {0} are not the right length'.format(filepath))
            #end if
            v = empty((ngrid,),dtype=float)
            for ir in xrange(ngrid):
                v[ir] = float(text.lines[i])
                i+=1
            #end for
            p[l] = v[1:]/r
        #end if
    #end while
    for l in p.keys():
        p[l] = convert(p[l],self.unitmap[units],self.energy_units)
    #end for
    self.set(
        element    = element,
        type       = 'Trail-Needs',
        Z          = Z,
        r          = r,
        potentials = p,
        pp = obj(
            Zatom = Zatom,
            Z     = Z,
            units = units,
            lloc  = lloc,
            ngrid = ngrid
            )
        )
def uncontract(self,tol=1e-3):
    if self.uncontracted():
        return
    #end if
    lbasis = self.lbasis()
    self.basis.clear()
    for l in self.lset_full:
        if l in lbasis:
            exponents = []
            lbas = lbasis[l]
            for n in xrange(len(lbas)):
                uterms = lbas[n].terms
                for i in xrange(len(uterms)):
                    expon = uterms[i].expon
                    if len(exponents)==0:
                        exponents = array([expon],dtype=float)
                    elif abs(exponents-expon).min()>tol:
                        exponents = array(list(exponents)+[expon],dtype=float)
                    #end if
                #end for
            #end for
            for expon in exponents:
                cterms = obj()
                cterms.append(obj(expon=expon,coeff=1.0))
                bf = obj(l=l,scale=array([1.0]),terms=cterms)
                self.basis.append(bf)
            #end for
        #end if
    #end for
def morse_fit_fine(r,E,p0=None,rfine=None,both=False,jackknife=False,minimizer=least_squares,capture=None):
    if rfine is None:
        rfine = linspace(r.min(),r.max(),400)
    #end if
    auxfuncs = obj(
        Efine = (morse,[None,rfine])
        )
    auxres = obj()
    res = morse_fit(r,E,p0,jackknife,minimizer,auxfuncs,auxres,capture)
    if not jackknife:
        pf = res
    else:
        pf,pmean,perror = res
    #end if
    Efine = morse(pf,rfine)
    if not jackknife:
        if not both:
            return Efine
        else:
            return pf,Efine
        #end if
    else:
        Emean,Eerror = auxres.Efine
        if not both:
            return Efine,Emean,Eerror
        else:
            return pf,pmean,perror,Efine,Emean,Eerror
        #end if
    #end if
def __init__(self, arg0=None, prefix=None, analyze=False, exit=False, **outfilenames):
    self.info = obj(
        exit        = exit,
        path        = None,
        input       = None,
        prefix      = None,
        files       = obj(),
        initialized = False,
        )
    infile = None
    if isinstance(arg0, Simulation):
        sim = arg0
        infile = os.path.join(sim.locdir, sim.infile)
    else:
        infile = arg0
    # end if
    if infile != None:
        info = self.info
        info.path = os.path.dirname(infile)
        info.input = GamessInput(infile)
        infilename = os.path.split(infile)[1]
        if prefix is None:
            prefix = infilename.rsplit(".", 1)[0]
        # end if
        info.prefix = prefix
        files = info.files
        for file, unit in GamessInput.file_units.iteritems():
            files[file.lower()] = "{0}.F{1}".format(prefix, str(unit).zfill(2))
        # end for
        files.input = infilename
        files.output = "{0}.out".format(prefix)
        # outfilenames is a keyword dict, so iterate over its items
        for name, filename in outfilenames.iteritems():
            if name in files:
                files[name] = filename
            else:
                self.error("unknown GAMESS file: {0}".format(name))
            # end if
        # end for
        info.initialized = True
        if analyze:
            self.analyze()
        # end if
    # end if
def __init__(self):
    # variables determined by self
    modes = self.modes
    self.persistent_modes = set([modes.submit, modes.all])
    self.simulations = obj()
    self.cascades = obj()
    self.progressing_cascades = obj()
    self.operations = []
def from_list_rep(self,list_rep):
    self.local = list_rep[0][0]
    if len(list_rep)>1:
        boxes = obj()
        for units,rmin,rmax,depth in list_rep[2:]:
            boxes.append(obj(units=units,rmin=rmin,rmax=rmax,depth=depth))
        #end for
        self.boxes = boxes
    #end if
def plot_trace(self,quantity,style='b-',offset=0,source='scalar',mlabels=True,
               mlines=True,show=True,alloff=False):
    mlabels &= not alloff
    mlines  &= not alloff
    show    &= not alloff
    shw = show
    offset = int(offset)
    id = self.info.input.get('project').id
    sdata = obj()
    series = sorted(self.qmc.keys())
    q = []
    soffset = offset
    for s in series:
        qmc = self.qmc[s]
        method = qmc.info.method
        if source=='scalar' or method=='vmc':
            src = qmc.scalars.data
        elif source=='dmc':
            src = qmc.dmc.data
        else:
            self.error('invalid source: '+source)
        #end if
        if quantity in src:
            qn = list(src[quantity])
        else:
            qn = len(src.LocalEnergy)*[0]
        #end if
        q.extend(qn)
        sdata[s] = obj(
            mlab     = method+' '+str(s),
            mloc     = soffset + len(qn)/2,
            line_loc = soffset + len(qn)-1
            )
        soffset += len(qn)
    #end for
    q = array(q)
    qmin = q.min()
    qmax = q.max()
    mlabel_height = qmin + .8*(qmax-qmin)
    if shw:
        figure()
    #end if
    plot(offset+arange(len(q)),q,style,label=id)
    for s in series:
        sd = sdata[s]
        if mlabels:
            text(sd.mloc,mlabel_height,sd.mlab)
        #end if
        if mlines:
            plot([sd.line_loc,sd.line_loc],[qmin,qmax],'k-')
        #end if
    #end for
    if shw:
        title('{0} vs series for {1}'.format(quantity,id))
        xlabel('blocks')
        ylabel(quantity)
        legend()
        # the 'show' argument shadows the pylab show() function, so fetch it explicitly
        from matplotlib.pyplot import show as show_plot
        show_plot()
    #end if
def correct(self,*corrections):
    corrkey = ''
    for corr in corrections:
        corrkey += corr+'_'
    #end for
    corrkey = corrkey[:-1]
    # every requested correction must be a known one
    if not set(corrections) <= set(self.corrections.keys()):
        self.warn('correction '+corrkey+' is unknown and cannot be applied')
        return
    #end if
    if not 'data' in self:
        self.warn('correction '+corrkey+' cannot be applied because data is not present')
        return
    #end if
    varname = 'LocalEnergy_'+corrkey
    if varname in self and varname in self.data:
        return
    #end if
    corrvars = ['LocalEnergy']
    signs    = [1]
    for corr in corrections:
        for var,sign in self.corrections[corr].iteritems():
            corrvars.append(var)
            signs.append(sign)
        #end for
    #end for
    missing = list(set(corrvars)-set(self.data.keys()))
    if len(missing)>0:
        self.warn('correction '+corrkey+' cannot be applied because '+str(missing)+' are missing')
        return
    #end if
    le = self.data.LocalEnergy
    E,E2 = 0*le.value,0*le.value_squared
    n = len(corrvars)
    for i in range(n):
        ed = self.data[corrvars[i]]
        e,e2 = ed.value,ed.value_squared
        s = signs[i]
        E  += s*e
        E2 += e2
        for j in range(i+1,n):
            eo = self.data[corrvars[j]].value
            so = signs[j]
            E2 += 2*s*e*so*eo
        #end for
    #end for
    val = obj(value=E,value_squared=E2)
    self.data[varname] = val
    nbe = self.info.nblocks_exclude
    (mean,var,error,kappa) = simstats(val.value[nbe:,...].ravel())
    self[varname] = obj(
        mean            = mean,
        variance        = val.value_squared[nbe:,...].mean()-mean**2,
        sample_variance = var,
        error           = error,
        kappa           = kappa
        )
def __init__(self,**kwargs):
    #user specified variables
    self.path         = None   #directory where sim will be run
    self.job          = None   #Job object for machine
    self.dependencies = obj()  #Simulation results on which sim serially depends

    #variables determined by self
    self.identifier = self.generic_identifier
    self.simid = Simulation.sim_count
    Simulation.sim_count+=1
    self.files = set()
    self.app_name  = self.application
    self.app_props = list(self.application_properties)
    self.sim_image      = self.sim_imagefile
    self.input_image    = self.input_imagefile
    self.analyzer_image = self.analyzer_imagefile
    self.image_dir      = self.image_directory
    self.input  = self.input_type()
    self.system = None
    self.dependents = obj()
    self.created_directories = False
    self.got_dependencies = False
    self.setup      = False
    self.sent_files = False
    self.submitted  = False
    self.finished   = False
    self.failed     = False
    self.got_output = False
    self.analyzed   = False
    self.subcascade_finished = False
    self.dependency_ids = set()
    self.wait_ids       = set()
    self.block = False
    self.block_subcascade = False
    self.skip_submit = nexus_core.skip_submit
    self.force_write = False
    self.loaded = False
    self.ordered_dependencies = []
    self.process_id = None
    self.infile  = None
    self.outfile = None
    self.errfile = None

    #variables determined by derived classes
    self.outputs = None  #object representing output data
                         # accessed by dependents when calling get_dependencies

    self.set(**kwargs)
    self.set_directories()
    self.set_files()
    self.propagate_identifier()
    if len(kwargs)>0:
        self.init_job()
    #end if
    self.post_init()
    if self.system!=None:
        self.system.check_folded_system()
    #end if
def __init__(self,*args,**kwargs):
    # some args are
    #   graph_name = 'G'
    #   graph_type in ('graph','digraph')
    self.graph = self.pydot_type(*args,**kwargs)
    self.name  = self.graph.get_name()
    self.nodes = obj()
    self.edges = obj()
    self.subgraphs = obj()
def add_density(self, cell, density, name="density", corner=None, grid=None,
                centered=False, add_ghost=False, transpose=False):
    if corner is None:
        corner = zeros((3,), dtype=float)
    # end if
    if grid is None:
        grid = density.shape
    # end if
    grid = array(grid, dtype=int)
    corner = array(corner, dtype=float)
    cell = array(cell, dtype=float)
    density = array(density, dtype=float)
    density.shape = tuple(grid)

    if centered:  # shift corner by half a grid cell to center it
        dc = 0.5 / grid
        dc = dot(dc, cell)
        corner += dc
    # end if

    if add_ghost:  # add ghost points to make a 'general' xsf grid
        g = grid  # this is an extra shell of points in PBC
        d = density
        grid = g + 1
        density = zeros(tuple(grid), dtype=float)
        density[: g[0], : g[1], : g[2]] = d[:, :, :]  # volume copy
        density[-1, : g[1], : g[2]] = d[0, :, :]      # face copies
        density[: g[0], -1, : g[2]] = d[:, 0, :]
        density[: g[0], : g[1], -1] = d[:, :, 0]
        density[-1, -1, : g[2]] = d[0, 0, :]          # edge copies
        density[-1, : g[1], -1] = d[0, :, 0]
        density[: g[0], -1, -1] = d[:, 0, 0]
        density[-1, -1, -1] = d[0, 0, 0]              # corner copy
    # end if

    if transpose:  # shift from row major to column major
        g = grid
        d = density
        density = zeros((d.size,))
        n = 0
        for k in xrange(g[2]):
            for j in xrange(g[1]):
                for i in xrange(g[0]):
                    density[n] = d[i, j, k]
                    n += 1
                # end for
            # end for
        # end for
        density.shape = tuple(grid)
    # end if

    self.data = obj()
    self.data[3] = obj()
    self.data[3][name] = obj()
    self.data[3][name][name] = obj(grid=grid, corner=corner, cell=cell, values=density)
def load_cascades(self):
    cascades = obj()
    progressing_cascades = obj()
    for cid,cascade in self.cascades.iteritems():
        rc = cascade.reconstruct_cascade()
        cascades[rc.simid] = rc
        progressing_cascades[rc.simid] = rc
    #end for
    self.cascades = cascades
    self.progressing_cascades = progressing_cascades
def from_list_rep(self,list_rep):
    self.symbol = list_rep[0][0]
    self.rorbs  = list_rep[1][0]
    ref = obj()
    n = 0
    for nlm,occ,eig in list_rep[2:]:
        ref[n] = obj(nlm=nlm,occ=occ,eig=eig)
        n+=1
    #end for
    self.ref = ref
def __init__(self,fpath,verbose=False,view=False):
    HDFglobals.view = view

    if verbose:
        print '  Initializing HDFreader'
    #end if

    self.fpath = fpath
    if verbose:
        print '    loading h5 file'
    #end if

    try:
        self.hdf = h5py.File(fpath,'r')
    except IOError:
        self._success = False
        self.hdf = obj(obj=obj())
    else:
        self._success = True
    #end try

    if verbose:
        print '    converting h5 file to dynamic object'
    #end if

    #convert the hdf 'dict' into a dynamic object
    self.nlevels = 1
    self.ilevel  = 0
    #  Set the current hdf group
    self.obj  = HDFgroup()
    self.cur  = [self.obj]
    self.hcur = [self.hdf]

    if self._success:
        cur  = self.cur[self.ilevel]
        hcur = self.hcur[self.ilevel]
        for kr,v in hcur.iteritems():
            k = cur._escape_name(kr)
            if valid_variable_name(k):
                vtype = str(type(v))
                if vtype in HDFreader.datasets:
                    self.add_dataset(cur,k,v)
                elif vtype in HDFreader.groups:
                    self.add_group(hcur,cur,k,v)
                else:
                    print 'hdfreader error: encountered invalid type: '+vtype
                    sys.exit()
                #end if
            else:
                print 'hdfreader warning: attribute '+k+' is not a valid variable name and has been ignored'
            #end if
        #end for
    #end if

    if verbose:
        print '  end HDFreader Initialization'
    #end if
    return
def lbasis(self):
    lbasis = obj()
    for n in range(len(self.basis)):
        bf = self.basis[n]
        l = bf.l
        if l not in lbasis:
            lbasis[l] = obj()
        #end if
        lbasis[l].append(bf)
    #end for
    return lbasis
def add_to_image(self, image, name, value):
    if image is None:
        self[name] = value
    else:
        if "images" not in self:
            self.images = obj()
        # end if
        if not image in self.images:
            self.images[image] = obj()
        # end if
        self.images[image][name] = value
    # end if
def analyze_outcar(self,outcar):
    if not os.path.exists(outcar):
        self.error('outcar file {0} does not exist'.format(outcar))
    #end if
    oc = open(outcar,'r')
    lines = oc.read().splitlines()
    oc.close()
    del oc
    # gather initialization lines
    init = []
    n = 0
    for line in lines:
        if len(line)>0 and line[0]=='-' and 'Iteration' in line:
            break
        #end if
        init.append(line)
        n+=1
    #end for
    # gather lines for each iteration
    ion_steps = obj()
    for line in lines[n:]:
        if len(line)>0 and line[0]=='-' and 'Iteration' in line:
            iteration = []
            inum,enum = line.strip(' -Iteration)').split('(')
            inum = int(inum)
            enum = int(enum)
            if not inum in ion_steps:
                ion_steps[inum] = obj()
            #end if
            ion_steps[inum][enum] = OutcarData(lines=iteration)
        #end if
        iteration.append(line)
    #end for
    del lines
    del n
    # read data from each iteration
    if len(ion_steps)>0:
        imax = array(ion_steps.keys(),dtype=int).max()
        for inum,ion_step in ion_steps.iteritems():
            ilast = inum==imax
            if len(ion_step)>0:
                emax = array(ion_step.keys(),dtype=int).max()
                for enum,elec_step in ion_step.iteritems():
                    elast = enum==emax
                    elec_step.read(ilast,elast,all=False)
                    if ilast and elast:
                        self.transfer_from(elec_step)
                    #end if
                #end for
            #end if
        #end for
    #end if
    self.ion_steps = ion_steps
def read_contents(self,lines):
    atoms = []
    masses = obj()
    pseudopotentials = obj()
    for l in lines:
        tokens = l.split()
        atom = tokens[0]
        atoms.append(tokens[0])
        masses[atom] = float(tokens[1])
        pseudopotentials[atom] = tokens[2]
    #end for
    self.add(atoms=atoms,masses=masses,pseudopotentials=pseudopotentials)
def read_contents(self,lines):
    tokens = lines[0].split()
    self.ncontraints = int(tokens[0])
    if len(tokens)>1:
        self.tolerance = float(tokens[1])
    #end if
    self.collective_vars = obj()
    for i in range(len(lines)-1):
        tokens = lines[i+1].split()
        collv = obj()
        collv.type = tokens[0]
        collv.parameters = array(tokens[1:],dtype=float64)
        self.collective_vars[i] = collv
    #end for
def __init__(self):
    self._name   = ''
    self._parent = None
    self._elements   = obj()
    self._texts      = obj()
    self._attributes = obj()
    self._element_counts = obj()
    self._ntexts = 0
    self._escape_names = None
    #self._escape_names=set(dict(getmembers(self)).keys()) | set(keyword.kwlist)
    self._escape_names = set(keyword.kwlist)
    return
def read_outcar_accounting(vlines,odata):
    time   = obj()
    memory = obj()
    vlines.advance_token('General timing and accounting')
    vlines.advance(2)
    time.cpu     = float(vlines.advance_line(1).split()[-1])
    time.user    = float(vlines.advance_line(1).split()[-1])
    time.system  = float(vlines.advance_line(1).split()[-1])
    time.elapsed = float(vlines.advance_line(1).split()[-1])
    vlines.advance(1)
    memory.maximum = float(vlines.advance_line(1).split()[-1])
    memory.average = float(vlines.advance_line(1).split()[-1])
    odata.time   = time
    odata.memory = memory
def load_cascades(self):
    self.dlog("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~load cascades", n=1)
    cascades = obj()
    progressing_cascades = obj()
    for cid, cascade in self.cascades.iteritems():
        self.dlog("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~reconstruct cascade", n=1)
        rc = cascade.reconstruct_cascade()
        self.dlog("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~end reconstruct cascade", n=1)
        cascades[rc.simid] = rc
        progressing_cascades[rc.simid] = rc
    # end for
    self.cascades = cascades
    self.progressing_cascades = progressing_cascades
    self.dlog("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~end load cascades", n=1)
def get_result(self,result_name,sim):
    result = obj()
    input = self.input
    wfn_file,ptcl_file = self.list_output_files()
    if result_name=='orbitals':
        result.location = os.path.join(self.locdir,wfn_file)
        if self.input.hdf5==True:
            orbfile = self.get_prefix()+'.orbs.h5'
            result.orbfile = os.path.join(self.locdir,orbfile)
        #end if
    elif result_name=='particles':
        result.location = os.path.join(self.locdir,ptcl_file)
    else:
        self.error('ability to get result '+result_name+' has not been implemented')
    #end if
    return result
def contracted_basis_size(self):
    bcount = obj()
    for bf in self.basis:
        l = bf.l
        if l not in bcount:
            bcount[l] = 0
        #end if
        bcount[l] += 1
    #end for
    bs = ''
    for l in self.lset_full:
        if l in bcount:
            bs += str(bcount[l])+l
        #end if
    #end for
    return bs
def init_job_extra(self):
    # gamess seems to need lots of environment variables to run properly
    # nearly all of these are names of output/work files
    # setup the environment to run gamess
    if not isinstance(self.ericfmt,str):
        self.error('you must set ericfmt with settings() or Gamess.settings()')
    #end if
    env = obj()
    for file,unit in GamessInput.file_units.iteritems():
        env[file] = '{0}.F{1}'.format(self.identifier,str(unit).zfill(2))
    #end for
    env.INPUT   = self.infile
    env.ERICFMT = self.ericfmt
    env.MCPPATH = self.mcppath
    self.job.set_environment(**env)
def prim_expons(self):
    if self.contracted():
        self.error('cannot find primitive gaussian expons because basis is contracted')
    #end if
    lbasis = self.lbasis()
    gexpon = obj()
    for l, lbas in lbasis.items():
        e = []
        for n in range(len(lbas)):
            e.append(lbas[n].terms[0].expon)
        #end for
        gexpon[l] = array(e, dtype=float)
    #end for
    return gexpon
def prim_widths(self):
    if self.contracted():
        self.error('cannot find primitive gaussian widths because basis is contracted')
    #end if
    lbasis = self.lbasis()
    gwidth = obj()
    for l, lbas in lbasis.items():
        w = []
        for n in range(len(lbas)):
            w.append(1. / sqrt(2. * lbas[n].terms[0].expon))
        #end for
        gwidth[l] = array(w, dtype=float)
    #end for
    return gwidth
class SystemGroup(KeywordSpecGroup):
    keywords = set([
            'mwords','memddi','timlim','parall','kdiag','corefl',
            'baltyp','mxseq2','mxseq3','nodext','iosmp','modio','memory'
            ])

    integers = set(['mwords','memddi','kdiag','mxseq2','mxseq3','modio','memory'])
    reals    = set(['timlim'])
    bools    = set(['parall','corefl'])
    strings  = set(['baltyp'])
    arrays   = set(['nodext','iosmp'])

    allowed_values = obj(
        kdiag  = set([0,1,2,3]),
        baltyp = set(['slb','dlb','loop','nxtval']),
        modio  = set([1,2,4,8,15]),
        )
def finalize(self):
    keys = list(self.keys())
    enums = obj()
    for k in keys:
        v = self[k]
        if isinstance(k,str) and '.' in k and k.split('.',1)[1].isdigit():
            name,index = k.split('.',1)
            index = int(index)
            if not name in enums:
                enums[name] = QEXML()
            #end if
            enums[name][index] = v
            del self[k]
            continue
        #end if
        if isinstance(v,QEXML):
            if len(set(v.keys())-self.array_keys)==0:
                a = array(v._value)
                if len(a)==1:
                    a = a[0]
                elif 'columns' in v and v.size%v.columns==0:
                    a.shape = v.size/v.columns,v.columns
                #end if
                self[k] = a
            else:
                v.finalize()
            #end if
        #end if
    #end for
    for k,v in enums.iteritems():
        self[k] = v
        v.finalize()
    #end for
    if len(self._value)==0:
        del self._value
    elif not 'value' in self:
        if len(self._value)==1:
            self.value = self._value[0]
        else:
            self.value = array(self._value)
        #end if
        del self._value
    else:
        if len(self._value)==1:
            self._value = self._value[0]
        else:
            self._value = array(self._value)
        #end if
    #end if
def __init__(self,structure=None,net_charge=0,net_spin=0,particles=None,**valency):
    self.pseudized = False

    if structure is None:
        self.structure = Structure()
    else:
        self.structure = structure
    #end if
    if particles is None:
        self.particles = Particles()
    else:
        self.particles = particles.copy()
    #end if

    self.folded_system = None
    if self.structure.folded_structure!=None:
        vratio = structure.volume()/structure.folded_structure.volume()
        ncells = int(round(vratio))
        if abs(vratio-ncells)>1e-4:
            self.error('volume of system does not divide evenly into folded system')
        #end if
        if net_charge%ncells!=0:
            self.error('net charge of system does not divide evenly into folded system')
        #end if
        if isinstance(net_spin,str):
            net_spin_fold = net_spin
        elif net_spin%ncells!=0:
            self.error('net_spin of system does not divide evenly into folded system')
        else:
            net_spin_fold = net_spin/ncells
        #end if
        self.folded_system = PhysicalSystem(
            structure  = structure.folded_structure,
            net_charge = net_charge/ncells,
            net_spin   = net_spin_fold,
            particles  = particles,
            **valency
            )
    #end if

    self.valency_in    = obj(**valency)
    self.net_charge_in = net_charge
    self.net_spin_in   = net_spin

    self.update_particles(clear=False)

    self.check_folded_system()
def get_result(self, result_name, sim):
    result = obj()
    analyzer = self.load_analyzer_image()
    if result_name == 'orbitals':
        outfiles = self.input.get_output_info(list=False)
        result.set(
            dir     = self.locdir,
            h5file  = outfiles.h5,
            qmcfile = outfiles.qmc,
            )
    elif result_name == 'jastrow':
        result.set(
            rcut = analyzer.find_rcut(qcut=1e-3),
            B    = 1. / analyzer.moment(n=1),
            )
    else:
        self.error('ability to get result ' + result_name + ' has not been implemented')
    #end if
    del analyzer
    return result
def get_result(self, result_name, sim):
    # optional
    #   only necessary if another simulation depends on this one
    #   e.g.
    #     other_sim.depends(template_simulation_sim,'orbitals') or similar
    # if you don't want to implement it, uncomment the line below
    #self.not_implemented()

    result = obj()
    input = self.input
    #analyzer = self.load_analyzer_image()

    # package information about a result/product in the result object
    # for example, if orbitals are requested,
    #   the path to the orbital file might be provided:
    # result.orbital_file = '/path/to/orbital/file'

    return result
def get_result(self, result_name, sim):
    result = obj()
    analyzer = self.load_analyzer_image()
    if result_name == 'jastrow':
        if not 'results' in analyzer or not 'optimization' in analyzer.results:
            self.error('analyzer did not compute results required to determine jastrow')
        #end if
        opt_file = str(analyzer.results.optimization.optimal_file)
        result.opt_file = os.path.join(self.locdir, opt_file)
    else:
        self.error('ability to get result ' + result_name + ' has not been implemented')
    #end if
    del analyzer
    return result
def resolve_file_collisions(self):
    self.log('checking for file collisions',n=1)
    entry_order = obj()
    def set_entry_order(sim,entry_order):
        locdir = sim.locdir
        if not locdir in entry_order:
            entry_order[locdir] = [sim]
        else:
            entry_order[locdir].append(sim)
        #end if
    #end def set_entry_order
    self.traverse_cascades(set_entry_order,entry_order)
    any_collisions = False
    collpath = ''
    for path,simlist in entry_order.iteritems():
        if len(simlist)>1:
            #raise an error if any in/out/err files will collide
            filespace = dict()
            for sim in simlist:
                if not sim.allow_overlapping_files:
                    files = sim.list('infile','outfile','errfile')
                    for f in files:
                        if f not in filespace:
                            filespace[f] = [sim]
                        else:
                            filespace[f].append(sim)
                        #end if
                    #end for
                #end if
            #end for
            for f,sims in filespace.iteritems():
                if len(sims)>1 and f!=None:
                    any_collisions = True
                    msg = 'collision: file '+f+' is overwritten by '
                    for sim in sims:
                        msg += str(sim.identifier)+' '+str(sim.simid)+','
                    #end for
                    self.log(msg[:-1],n=2)
                    collpath = path
                #end if
            #end for
        #end if
    #end for
    if any_collisions:
        # report the directory where a collision was actually found
        self.error('file collisions found in directory\n  '+collpath+'\n  set a unique identifier for each simulation')
    #end if
class Optinfo(Section):
    variables = obj(qcuts=list, bessels=list)

    def list_rep(self):
        list_rep = zip(self.qcuts, self.bessels)
        return list_rep
    #end def list_rep

    def from_list_rep(self, list_rep):
        qc = []
        bs = []
        for q, b in list_rep:
            qc.append(q)
            bs.append(b)
        #end for
        self.qcuts = qc
        self.bessels = bs
    #end def from_list_rep
def incorporate_user_info(self, infoin):
    info = obj(**infoin)
    vars = set(info.keys())
    invalid = vars - self.allowed_user_info
    if len(invalid) > 0:
        self.error('invalid inputs encountered in incorporate_user_info\n  allowed inputs: {0}\n  invalid inputs: {1}'.format(list(self.allowed_user_info), list(invalid)))
    #end if
    if 'app_directories' in info:
        ad = info.app_directories
        if not isinstance(ad, dict) and not isinstance(ad, obj):
            self.error('app_directories must be of type dict or obj\n  you provided ' + ad.__class__.__name__)
        #end if
    #end if
    self.transfer_from(info)
class BasisGroup(KeywordSpecGroup):
    keywords = set([
            'gbasis','ngauss','ndfunc','npfunc','diffsp','diffs',
            'polar' ,'split2','split3','basnam','extfil'
            ])

    integers = set(['ngauss','ndfunc','nffunc'])
    bools    = set(['diffsp','diffs','extfil'])
    strings  = set(['gbasis','polar'])
    arrays   = set(['split2','split3','basname'])

    allowed_values = obj(
        #gbasis = set(['sto','n21','n31','n311','g3l','g3lx','mini','midi','dzv',
        #              'dh','tzv','mc'])  # many others
        ndfunc = set([0,1,2,3]),
        nffunc = set([0,1]),
        polar  = set(['common','popn31','popn311','dunning','huzinaga','hondo7']),
        )
def __init__(self):
    self.methods = set(['opt', 'vmc', 'dmc', 'rmc'])
    self.data_sources = set(['scalar', 'stat', 'dmc', 'storeconfig', 'opt', 'traces'])
    self.scalars = set([
        'localenergy', 'localpotential', 'kinetic', 'elecelec', 'localecp',
        'nonlocalecp', 'ionion', 'localenergy_sq', 'acceptratio', 'blockcpu',
        'blockweight', 'mpc', 'kecorr'
        ])
    self.fields = set([
        'energydensity', 'density', 'dm1b', 'spindensity', 'structurefactor'
        ])

    hdf_data_sources = set(['stat', 'storeconfig', 'traces'])
    if h5py_unavailable:
        self.data_sources -= hdf_data_sources
    #end if

    self.analyzer_quantities = set(self.fields)

    self.analyzers = obj(
        scalars_dat     = ScalarsDatAnalyzer,
        scalars_hdf     = ScalarsHDFAnalyzer,
        dmc_dat         = DmcDatAnalyzer,
        traces          = TracesAnalyzer,
        energydensity   = EnergyDensityAnalyzer,
        dm1b            = DensityMatricesAnalyzer,
        spindensity     = SpinDensityAnalyzer,
        structurefactor = StructureFactorAnalyzer,
        density         = DensityAnalyzer,
        )

    self.quantities = self.scalars | self.fields

    self.ignorable_estimators = set(['LocalEnergy'])

    self.quantity_aliases = dict()
    for q in self.analyzer_quantities:
        self.quantity_aliases[q] = q
    #end for

    self.future_quantities = set(['StructureFactor', 'MomentumDistribution'])
    return
def test_convert4qmc_input_generate():
    from generic import obj
    from qmcpack_converters import generate_convert4qmc_input

    ci = generate_convert4qmc_input(
        gamess = 'gamess.out',
        hdf5   = True,
        )

    ci_ref = obj(
        add_3body_J        = False,
        add_cusp           = False,
        app_name           = 'convert4qmc',
        casino             = None,
        ci                 = None,
        first              = None,
        gamess             = 'gamess.out',
        gamess_ascii       = None,
        gamess_fmo         = None,
        gamess_xml         = None,
        gaussian           = None,
        gridtype           = None,
        hdf5               = True,
        ion_tag            = None,
        last               = None,
        multidet           = None,
        natural_orbitals   = None,
        no_jastrow         = False,
        opt_det_coeffs     = False,
        orbitals           = None,
        prefix             = None,
        production         = False,
        psi_tag            = None,
        pyscf              = None,
        qp                 = None,
        read_initial_guess = None,
        size               = None,
        target_state       = None,
        threshold          = None,
        vsvb               = None,
        zero_ci            = False,
        )

    assert(object_eq(ci.to_obj(), ci_ref))
def check_attributes(self, exit=False):
    msg = ''
    cls = self.__class__
    a = obj()
    for name in cls.toplevel_attributes:
        if name in self:
            a[name] = self[name]
        #end if
    #end for
    props = cls.attribute_definitions
    for name in cls.sublevel_attributes:
        p = props[name]
        if p.dest in self:
            sub = self[p.dest]
            if name in sub:
                a[name] = sub[name]
            #end if
        #end if
    #end for
    present = set(a.keys())
    missing = cls.required_attributes - present
    if len(missing) > 0:
        m = ''
        for n in sorted(missing):
            m += '\n  ' + n
        #end for
        msg += 'Required attributes are missing.\nPlease provide the following attributes during initialization:{}\n'.format(m)
    #end if
    for name in cls.typed_attributes:
        if name in a:
            p = props[name]
            v = a[name]
            if not isinstance(v, p.type):
                msg += 'Attribute "{}" has invalid type.\n  Type expected: {}\n  Type present: {}\n'.format(name, p.type.__name__, v.__class__.__name__)
            #end if
        #end if
    #end for
    valid = len(msg) == 0
    if not valid and exit:
        self.error(msg)
    #end if
    return valid
def get_result(self, result_name, sim):
    result = obj()
    rc = self.input.run_control
    if result_name == 'orbitals':
        if rc.run_type == 'save_for_qmcpack':
            result.outfile = os.path.join(self.locdir, self.outfile)
        elif rc.save_for_qmcpack:
            result.outfile = os.path.join(self.locdir, '{0}_savewf.out'.format(self.identifier))
        else:
            self.error('cannot get orbitals\ntracking of save_for_qmcpack is somehow corrupted\nthis is a developer error')
        #end if
    else:
        self.error('ability to get result ' + result_name + ' has not been implemented')
    #end if
    return result
def get_result(self, result_name, sim):
    result = obj()
    input = self.input
    if result_name == 'structure':
        # OUTCAR structure is not as precise as CONTCAR structure
        #pa = self.load_analyzer_image()
        #elem = input.poscar.elem
        #elem_count = input.poscar.elem_count
        #atoms = []
        #for i in range(len(elem)):
        #    atoms += elem_count[i]*[elem[i]]
        ##end for
        #structure = Structure(
        #    units = 'A',
        #    axes  = pa.lattice_vectors.copy(),
        #    elem  = atoms,
        #    pos   = pa.position.copy()
        #    )

        # get structure from CONTCAR
        ccfile = os.path.join(self.locdir, self.identifier + '.CONTCAR')
        if not os.path.exists(ccfile):
            self.error('CONTCAR file does not exist for relax simulation at ' + self.locdir)
        #end if
        contcar = Poscar(ccfile)
        structure = Structure()
        if contcar.elem != None:
            structure.read_poscar(ccfile)
        else:
            elem, elem_count = self.system.structure.order_by_species()
            structure.read_poscar(ccfile, elem=elem)
        #end if
        if input.poscar.dynamic != None:
            structure.freeze(input.poscar.dynamic, negate=True)
        #end if
        result.structure = structure
    else:
        self.error('ability to get result ' + result_name + ' has not been implemented')
    #end if
    return result
class GuessGroup(KeywordSpecGroup):
    keywords = set([
            'guess' ,'prtmo' ,'punmo' ,'mix'   ,'norb'  ,'norder',
            'iorder','jorder','insorb','purify','tolz'  ,'tole'  ,'symden'
            ])

    integers = set(['norb', 'norder', 'insorb'])
    reals    = set(['tolz', 'tole'])
    bools    = set(['prtmo', 'punmo', 'mix', 'purify', 'symden'])
    strings  = set(['guess'])
    arrays   = set(['iorder', 'jorder'])

    allowed_values = obj(
        guess = set(['huckel', 'hcore', 'moread', 'rdmini', 'mosaved',
                     'skip', 'fmo', 'hucsub', 'dmread']),
        norder = set([0, 1]),
        )
class CidrtGroup(KeywordSpecGroup):
    keywords = set([
            'group' ,'fors'  ,'foci'  ,'soci'  ,'iexcit','intact','nfzc' ,
            'ndoc'  ,'naos'  ,'nbos'  ,'nalp'  ,'nval'  ,'next'  ,'nfzv' ,
            'stsym' ,'noirr' ,'mxnint','mxneme','nprt'
            ])

    integers = set(['iexcit','nfzc','ndoc','naos','nbos','nalp','nval',
                    'next','nfzv','noirr','mxnint','mxneme','nprt'])
    bools    = set(['fors','foci','soci','intact'])
    strings  = set(['group','stsym'])

    allowed_values = obj(
        group = set(['c1','c2','ci','cs','c2v','c2h','d2','d2h','c4v','d4','d4h']),
        stsym = set(['a','ag','au','ap','app','a','b','a1','a2','b1','b2','ag',
                     'bu','bg','au','a','b1','b2','b3','ag','b1g','b2g','b3g',
                     'au','b1u','b2u','b3u']),
        nprt  = set([0,1,2,3]),
        )
def read_results(self, logfile):
    results = obj()
    if 'setup_info' in self and 'run_mode' in self.setup_info:
        mode = self.setup_info.run_mode
    else:
        return
    #end if
    f = logfile
    if mode == 'scf':
        f.seek('final total energy', 1)
        t = f.readtokens()
        results.energy = float(t[-2])
        results.energy_units = t[-1]
    elif mode == 'band':
        None
    else:
        self.warn('Results not read.\nUnrecognized run mode: {}'.format(mode))
    #end if
    self.results = results
def pseudize(self,**valency):
    errors = False
    for ion,valence_charge in valency.iteritems():
        if ion in self.particles:
            ionp = self.particles[ion]
            if isinstance(ionp,Ion):
                self.particles[ion] = ionp.pseudize(valence_charge)
            else:
                self.error(ion+' cannot be pseudized',exit=False)
            #end if
        else:
            self.error(ion+' is not in the physical system',exit=False)
            errors = True
        #end if
    #end for
    if errors:
        self.error('system cannot be generated')
    #end if
    self.valency = obj(**valency)
    self.update()
def get_result(self,result_name,sim):
    result = obj()
    analyzer = self.load_analyzer_image()
    if result_name=='jastrow' or result_name=='wavefunction':
        if not 'results' in analyzer or not 'optimization' in analyzer.results:
            self.error('analyzer did not compute results required to determine jastrow')
        #end if
        opt_file = analyzer.results.optimization.optimal_file
        opt_file = str(opt_file)
        result.opt_file = os.path.join(self.locdir,opt_file)
    elif result_name=='cuspcorr':
        result.spo_up_cusps = os.path.join(self.locdir,self.identifier+'.spo-up.cuspInfo.xml')
        result.spo_dn_cusps = os.path.join(self.locdir,self.identifier+'.spo-dn.cuspInfo.xml')
        result.updet_cusps  = os.path.join(self.locdir,'updet.cuspInfo.xml')
        result.dndet_cusps  = os.path.join(self.locdir,'downdet.cuspInfo.xml')
    else:
        self.error('ability to get result '+result_name+' has not been implemented')
    #end if
    del analyzer
    return result
def set_dev_instruction(cls, situation='writing a derived class',
                        class_variables=None, class_functions=None,
                        member_variables=None, member_functions=None):
    if class_variables is None:
        class_variables = []
    if class_functions is None:
        class_functions = []
    if member_variables is None:
        member_variables = []
    if member_functions is None:
        member_functions = []
    ins = obj()
    cls.dev_instructions_data[situation] = ins
    ins.class_variables  = class_variables
    ins.class_functions  = class_functions
    ins.member_variables = member_variables
    ins.member_functions = member_functions
def read_states(self):
    log = self.log
    log.seek('state #')
    nstates = 0
    elem_ind = set()
    elem = []
    while True:
        line = log.readline()
        tokens = line.replace('(', ' ').replace(')', ' ').split()
        if not (len(tokens) > 0 and tokens[0] == 'state'):
            break
        #end if
        ei, e = tokens[4], tokens[5]
        if ei not in elem_ind:
            elem.append(e)
            elem_ind.add(ei)
        #end if
        nstates += 1
    #end while
    self.states = obj(nstates=nstates, elem=elem)
class QAobject(Pobj):

    _global = obj()
    _global.dynamic_methods_objects = []

    plotter = Plotter()

    opt_methods = set(['opt', 'linear', 'cslinear'])

    def __init__(self):
        return
    #end def __init__

    @staticmethod
    def condense_name(name):
        return name.strip().lower().replace(' ', '_').replace('-', '_').replace('__', '_')
    #end def condense_name

    def _register_dynamic_methods(self):
        QAobject._global.dynamic_methods_objects.append(self)
        return
    #end def _register_dynamic_methods

    def _unlink_dynamic_methods(self):
        for o in QAobject._global.dynamic_methods_objects:
            o._unset_dynamic_methods()
        #end for
        return
    #end def _unlink_dynamic_methods

    def _relink_dynamic_methods(self):
        for o in QAobject._global.dynamic_methods_objects:
            o._reset_dynamic_methods()
        #end for
        return
    #end def _relink_dynamic_methods
def get_result(self, result_name, sim):
    result = obj()
    inputpp = self.input.inputpp
    prefix = 'pwscf'
    outdir = './'
    if 'prefix' in inputpp:
        prefix = inputpp.prefix
    #end if
    if 'outdir' in inputpp:
        outdir = inputpp.outdir
    #end if
    if outdir.startswith('./'):
        outdir = outdir[2:]
    #end if
    if result_name == '':
        None
    else:
        self.error('ability to get result ' + result_name + ' has not been implemented')
    #end if
    return result
def md_statistics(self, equil=None, autocorr=None):
    import numpy as np
    from numerics import simstats, simplestats
    mds = obj()
    for q, v in self.md_data.items():
        if equil is not None:
            v = v[equil:]
        #end if
        if autocorr is None:
            mean, var, error, kappa = simstats(v)
        else:
            # reblock the trace into bins of length autocorr before averaging
            nv = len(v)
            nb = int(np.floor(float(nv) / autocorr))
            nexclude = nv - nb * autocorr
            v = v[nexclude:]
            v.shape = nb, autocorr
            mean, error = simplestats(v.mean(axis=1))
        #end if
        mds[q] = mean, error
    #end for
    return mds