def submit_jobs(self, mvals, AGrad=False, AHess=False):
    n = 0
    id_string = "%s_%i-%i" % (self.name, Counter(), n)

    while os.path.exists('%s.out' % id_string):
        n += 1
        id_string = "%s_%i-%i" % (self.name, Counter(), n)

    with open('forcebalance.p', 'w') as f:
        forcebalance.nifty.lp_dump((mvals, AGrad, AHess, id_string, self.r_options, self.r_tgt_opts, self.FF), f)

    forcebalance.nifty.LinkFile(os.path.join(os.path.split(__file__)[0], "data", "rtarget.py"), "rtarget.py")
    forcebalance.nifty.LinkFile(os.path.join(self.root, "temp", self.name, "target.tar.bz2"), "%s.tar.bz2" % self.name)

    wq = getWorkQueue()

    logger.info("Sending target '%s' to work queue for remote evaluation\n" % self.name)
    # input:
    #   forcebalance.p: pickled mvals, options, and force field
    #   rtarget.py: remote target evaluation script
    #   target.tar.bz2: tarred target
    # output:
    #   objective.p: pickled objective function dictionary
    #   indicate.log: results of target.indicate() written to file
    forcebalance.nifty.queue_up(wq, "python rtarget.py > %s.out 2>&1" % id_string,
                                ["forcebalance.p", "rtarget.py", "%s.tar.bz2" % self.name],
                                ['objective_%s.p' % id_string, 'indicate_%s.log' % id_string, '%s.out' % id_string],
                                tgt=self)

    self.id_string = id_string

def Target_Terms(self, mvals, Order=0, verbose=False, customdir=None):
    ## This is the objective function; it's a dictionary containing the value, first and second derivatives
    Objective = {'X': 0.0, 'G': np.zeros(self.FF.np), 'H': np.zeros((self.FF.np, self.FF.np))}
    # Loop through the targets, stage the directories and submit the Work Queue processes.
    for Tgt in self.Targets:
        Tgt.stage(mvals, AGrad = Order >= 1, AHess = Order >= 2, customdir=customdir)
    if self.asynchronous:
        # Asynchronous evaluation of objective function and Work Queue tasks.
        # Create a list of the targets, and remove them from the list as they are finished.
        Need2Evaluate = self.Targets[:]
        # This ensures that the OrderedDict doesn't get out of order.
        for Tgt in self.Targets:
            self.ObjDict[Tgt.name] = None
        # Loop through the targets and compute the objective function for ones that are finished.
        while len(Need2Evaluate) > 0:
            for Tgt in Need2Evaluate:
                if Tgt.wq_complete():
                    # List of functions that I can call.
                    Funcs = [Tgt.get_X, Tgt.get_G, Tgt.get_H]
                    # Call the appropriate function
                    Ans = Funcs[Order](mvals, customdir=customdir)
                    # Print out the qualitative indicators
                    if verbose:
                        Tgt.meta_indicate(customdir=customdir)
                    # Note that no matter which order of function we call, we still increment the objective / gradient / Hessian the same way.
                    if not in_fd():
                        self.ObjDict[Tgt.name] = {'w': Tgt.weight/self.WTot, 'x': Ans['X']}
                    for i in range(3):
                        Objective[Letters[i]] += Ans[Letters[i]]*Tgt.weight/self.WTot
                    Need2Evaluate.remove(Tgt)
                    break
                else:
                    pass
    else:
        wq = getWorkQueue()
        if wq is not None:
            wq_wait(wq)
        for Tgt in self.Targets:
            # The first call is always done at the midpoint.
            Tgt.bSave = True
            # List of functions that I can call.
            Funcs = [Tgt.get_X, Tgt.get_G, Tgt.get_H]
            # Call the appropriate function
            Ans = Funcs[Order](mvals, customdir=customdir)
            # Print out the qualitative indicators
            if verbose:
                Tgt.meta_indicate(customdir=customdir)
            # Note that no matter which order of function we call, we still increment the objective / gradient / Hessian the same way.
            if not in_fd():
                self.ObjDict[Tgt.name] = {'w': Tgt.weight/self.WTot, 'x': Ans['X']}
            for i in range(3):
                Objective[Letters[i]] += Ans[Letters[i]]*Tgt.weight/self.WTot
    # The target has evaluated at least once.
    for Tgt in self.Targets:
        Tgt.evaluated = True
    # Safeguard to make sure we don't have exact zeros on the Hessian diagonal.
    for i in range(self.FF.np):
        if Objective['H'][i, i] == 0.0:
            Objective['H'][i, i] = 1.0
    return Objective

def submit_jobs(self, mvals, AGrad=False, AHess=False):
    id_string = "%s_iter%04i" % (self.name, Counter())
    self.serialize_ff(mvals, outside="forcefield-remote")
    forcebalance.nifty.lp_dump((AGrad, AHess, id_string, self.r_options, self.r_tgt_opts, self.pgrad), 'options.p')
    # Link in the rpfx script.
    if len(self.rpfx) > 0:
        forcebalance.nifty.LinkFile(os.path.join(os.path.split(__file__)[0], "data", self.rpfx), self.rpfx)
    forcebalance.nifty.LinkFile(os.path.join(os.path.split(__file__)[0], "data", "rtarget.py"), "rtarget.py")
    forcebalance.nifty.LinkFile(os.path.join(self.root, self.tempdir, "target.tar.bz2"), "target.tar.bz2")
    wq = getWorkQueue()
    # logger.info("Sending target '%s' to work queue for remote evaluation\n" % self.name)
    # input:
    #   forcefield.p: pickled force field
    #   options.p: pickled mvals, options
    #   rtarget.py: remote target evaluation script
    #   target.tar.bz2: tarred target
    # output:
    #   objective.p: pickled objective function dictionary
    #   indicate.log: results of target.indicate() written to file
    # if len(self.rpfx) > 0 and self.rpfx not in ['rungmx.sh', 'runcuda.sh']:
    #     logger.error('Unsupported prefix script for launching remote target')
    #     raise RuntimeError
    forcebalance.nifty.queue_up(wq, "%spython rtarget.py > rtarget.out 2>&1" %
                                (("sh %s%s " % (self.rpfx, " -b" if self.rbak else "")) if len(self.rpfx) > 0 else ""),
                                ["forcefield.p", "options.p", "rtarget.py", "target.tar.bz2"] +
                                ([self.rpfx] if len(self.rpfx) > 0 else []),
                                ['objective.p', 'indicate.log', 'rtarget.out'],
                                tgt=self, tag=self.name, verbose=False)

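Neither version of submit_jobs above includes the worker-side script itself. The sketch below is a hypothetical outline, inferred from the input/output comments above, of what a script like rtarget.py could do; it is not the actual rtarget.py shipped in the data directory, and the helper name remote_evaluate and the assumption that options.p is readable with the standard pickle module are illustrative only.

# Hypothetical worker-side outline (NOT the real rtarget.py); names and the
# plain-pickle assumption are illustrative.
import pickle
import tarfile

def remote_evaluate():
    # Unpack the target directory that was shipped alongside the task.
    with tarfile.open('target.tar.bz2', 'r:bz2') as tar:
        tar.extractall()
    # Read the options pickled by submit_jobs() above (assumes a plain-pickle payload).
    with open('options.p', 'rb') as f:
        AGrad, AHess, id_string, r_options, r_tgt_opts, pgrad = pickle.load(f)
    # ... rebuild the force field and target here, then evaluate X / G / H ...
    Answer = {'X': 0.0, 'G': None, 'H': None}  # placeholder objective dictionary
    # Write the result back so the master can collect objective.p.
    with open('objective.p', 'wb') as f:
        pickle.dump(Answer, f)

if __name__ == '__main__':
    remote_evaluate()
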
def npt_simulation(self, temperature, pressure, simnum):
    """ Submit a NPT simulation to the Work Queue. """
    wq = getWorkQueue()
    if not (os.path.exists('npt_result.p') or os.path.exists('npt_result.p.bz2')):
        link_dir_contents(os.path.join(self.root, self.rundir), os.getcwd())
        if wq is None:
            print("Running condensed phase simulation locally.")
            print("You may tail -f %s/npt.out in another terminal window" % os.getcwd())
            # if GoodStep() and (temperature, pressure) in self.DynDict_New:
            #     self.DynDict[(temperature, pressure)] = self.DynDict_New[(temperature, pressure)]
            # if (temperature, pressure) in self.DynDict:
            #     dynsrc = self.DynDict[(temperature, pressure)]
            #     dyndest = os.path.join(os.getcwd(), 'liquid.dyn')
            #     print("Copying .dyn file: %s to %s" % (dynsrc, dyndest))
            #     shutil.copy2(dynsrc, dyndest)
            cmdstr = 'python npt.py gromacs %i %.3f %.3f %.3f %.3f %s --liquid_equ_steps %i &> npt.out' % \
                (self.liquid_prod_steps, self.liquid_timestep, self.liquid_interval, temperature, pressure,
                 " --minimize_energy" if self.minimize_energy else "", self.liquid_equ_steps)
            _exec(cmdstr)
            # self.DynDict_New[(temperature, pressure)] = os.path.join(os.getcwd(), 'liquid.dyn')
        else:
            # This part of the code has never been used before.
            # Still need to figure out where to specify the GROMACS location on each cluster.
            # queue_up(wq,
            #          command = 'python npt.py liquid.xyz %.3f %.3f &> npt.out' % (temperature, pressure),
            #          input_files = ['liquid.xyz', 'liquid.key', 'mono.xyz', 'mono.key', 'forcebalance.p', 'npt.py'],
            #          output_files = ['npt_result.p.bz2', 'npt.py'] + self.FF.fnms,
            #          tgt=self)
            raise RuntimeError('Remote GROMACS execution is not yet enabled')

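The remote branch above only raises RuntimeError. A minimal sketch of how that branch could be filled in, following the queue_up() pattern used by the other targets in this section, is given below; the command string and input/output file lists are assumptions, not the shipped implementation.

# Hypothetical remote branch for npt_simulation(), modeled on the commented-out
# queue_up() call above; file lists and command string are assumptions.
cmdstr = 'python npt.py gromacs %i %.3f %.3f %.3f %.3f %s --liquid_equ_steps %i &> npt.out' % \
    (self.liquid_prod_steps, self.liquid_timestep, self.liquid_interval, temperature, pressure,
     " --minimize_energy" if self.minimize_energy else "", self.liquid_equ_steps)
queue_up(wq, command=cmdstr,
         input_files=['npt.py', 'forcebalance.p'] + self.FF.fnms,
         output_files=['npt_result.p.bz2', 'npt.out'],
         tgt=self)
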
def run_simulation(self, label, liq, AGrad=True):
    """
    Submit a simulation to the Work Queue or run it locally.

    Inputs:
    label = The name of the molecule (and hopefully the folder name that you're running in)
    liq = True/False flag indicating whether to run in the liquid or gas phase
    """
    wq = getWorkQueue()
    # Create a dictionary of MD options that the script will read.
    md_opts = OrderedDict()
    md_opts['temperature'] = self.hfe_temperature
    md_opts['pressure'] = self.hfe_pressure
    md_opts['minimize'] = True
    if liq:
        sdnm = 'liq'
        md_opts['nequil'] = self.liquid_eq_steps
        md_opts['nsteps'] = self.liquid_md_steps
        md_opts['timestep'] = self.liquid_timestep
        md_opts['sample'] = self.liquid_interval
    else:
        sdnm = 'gas'
        md_opts['nequil'] = self.gas_eq_steps
        md_opts['nsteps'] = self.gas_md_steps
        md_opts['timestep'] = self.gas_timestep
        md_opts['sample'] = self.gas_interval
    eng_opts = deepcopy(self.engine_opts)
    # Enforce implicit solvent in the liquid simulation.
    # We need to be more careful with this when running explicit solvent.
    eng_opts['implicit_solvent'] = liq
    eng_opts['coords'] = os.path.basename(self.molecules[label])
    if not os.path.exists(sdnm):
        os.makedirs(sdnm)
    os.chdir(sdnm)
    if not os.path.exists('md_result.p'):
        # Link in the contents of the target's run directory.
        link_dir_contents(os.path.join(self.root, self.rundir), os.getcwd())
        # Link in the scripts required to run the simulation.
        for f in self.scripts:
            LinkFile(os.path.join(os.path.split(__file__)[0], "data", f), os.path.join(os.getcwd(), f))
        # Link in the coordinate file.
        LinkFile(self.molecules[label], './%s' % os.path.basename(self.molecules[label]))
        # Store names of previous trajectory files.
        self.last_traj += [os.path.join(os.getcwd(), i) for i in self.extra_output]
        # Write target, engine and simulation options to disk.
        lp_dump((self.OptionDict, eng_opts, md_opts), 'simulation.p')
        # Execute the script for running molecular dynamics.
        cmdstr = '%s python md_ism_hfe.py %s' % (self.prefix, "-g" if AGrad else "")
        if wq is None:
            logger.info("Running condensed phase simulation locally.\n")
            logger.info("You may tail -f %s/md.out in another terminal window\n" % os.getcwd())
            _exec(cmdstr, copy_stderr=True, outfnm='md.out')
        else:
            queue_up(wq, command=cmdstr + ' &> md.out',
                     tag='%s:%s/%s' % (self.name, label, "liq" if liq else "gas"),
                     input_files=self.scripts + ['simulation.p', 'forcefield.p', os.path.basename(self.molecules[label])],
                     output_files=['md_result.p', 'md.out'] + self.extra_output,
                     tgt=self, verbose=False, print_time=3600)
    os.chdir('..')

def wq_complete(self):
    """ This method determines whether the Work Queue tasks for the current target have completed. """
    wq = getWorkQueue()
    WQIds = getWQIds()
    if wq is None:
        return True
    elif wq.empty():
        WQIds[self.name] = []
        return True
    elif len(WQIds[self.name]) == 0:
        return True
    else:
        wq_wait1(wq, wait_time=30)
        if len(WQIds[self.name]) == 0:
            return True
        else:
            return False

def interaction_driver_all(self, dielectric=False):
    """ Computes the energy and force using GROMACS for a trajectory. This does not require GROMACS-X2. """
    # Remove backup files.
    rm_gmx_baks(os.getcwd())
    # Do the interacting calculation.
    _exec(["./grompp", "-f", "interaction.mdp", "-n", "index.ndx"], print_command=False)
    _exec(["./mdrun", "-nt", "1", "-rerunvsite", "-rerun", "all.gro"], print_command=False)
    # Gather information
    _exec(["./g_energy", "-xvg", "no"], print_command=False, stdin="Potential\n")
    Interact = array([float(l.split()[1]) for l in open('energy.xvg').readlines()])
    # Do the excluded calculation.
    _exec(["./grompp", "-f", "excluded.mdp", "-n", "index.ndx"], print_command=False)
    _exec(["./mdrun", "-nt", "1", "-rerunvsite", "-rerun", "all.gro"], print_command=False)
    # Gather information
    _exec(["./g_energy", "-xvg", "no"], print_command=False, stdin="Potential\n")
    Excluded = array([float(l.split()[1]) for l in open('energy.xvg').readlines()])
    # The interaction energy.
    M = Interact - Excluded
    # Now we have the MM interaction energy.
    # We need the COSMO component of the interaction energy now...
    if dielectric:
        traj_dimer = deepcopy(self.traj)
        traj_dimer.add_quantum("qtemp_D.in")
        traj_dimer.write("qchem_dimer.in", ftype="qcin")
        traj_monoA = deepcopy(self.traj)
        traj_monoA.add_quantum("qtemp_A.in")
        traj_monoA.write("qchem_monoA.in", ftype="qcin")
        traj_monoB = deepcopy(self.traj)
        traj_monoB.add_quantum("qtemp_B.in")
        traj_monoB.write("qchem_monoB.in", ftype="qcin")
        wq = getWorkQueue()
        if wq is None:
            warn_press_key("To proceed past this point, a Work Queue must be present")
        print("Computing the dielectric energy")
        Diel_D = QChem_Dielectric_Energy("qchem_dimer.in", wq)
        Diel_A = QChem_Dielectric_Energy("qchem_monoA.in", wq)
        # The dielectric energy for a water molecule should never change.
        if hasattr(self, "Diel_B"):
            Diel_B = self.Diel_B
        else:
            Diel_B = QChem_Dielectric_Energy("qchem_monoB.in", self.wq)
            self.Diel_B = Diel_B
        self.Dielectric = Diel_D - Diel_A - Diel_B
        M += self.Dielectric
    return M

def calc_wq_new(self, coords, dirname):
    wq = getWorkQueue()
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    # Convert coordinates back to the xyz file.
    self.M.xyzs[0] = coords.reshape(-1, 3) * bohr2ang
    self.M.edit_qcrems({'jobtype': 'force'})
    self.M[0].write(os.path.join(dirname, 'run.in'))
    in_files = [('%s/run.in' % dirname, 'run.in')]
    out_files = [('%s/run.out' % dirname, 'run.out'), ('%s/run.log' % dirname, 'run.log')]
    if self.qcdir:
        raise RuntimeError("--qcdir currently not supported with Work Queue")
    queue_up_src_dest(wq, "qchem%s run.in run.out &> run.log" % self.nt(),
                      in_files, out_files, verbose=False)

def get(self, mvals, AGrad=False, AHess=False):
    """
    LPW 04-17-2013

    This subroutine builds the objective function from Psi4.

    @param[in] mvals Mathematical parameter values
    @param[in] AGrad Switch to turn on analytic gradient
    @param[in] AHess Switch to turn on analytic Hessian
    @return Answer Contribution to the objective function
    """
    Answer = {}
    Fac = 1000000
    n = len(mvals)
    X = 0.0
    G = np.zeros(n)
    H = np.zeros((n, n))
    pvals = self.FF.make(mvals)
    self.tdir = os.getcwd()
    self.objd = OrderedDict()
    self.gradd = OrderedDict()
    self.hdiagd = OrderedDict()
    wq = getWorkQueue()

    def fdwrap2(func, mvals0, pidx, qidx, key=None, **kwargs):
        def func2(arg1, arg2):
            mvals = list(mvals0)
            mvals[pidx] += arg1
            mvals[qidx] += arg2
            logger.info("\rfdwrap2:" + func.__name__ + "[%i] = % .1e , [%i] = % .1e" % (pidx, arg1, qidx, arg2) + ' '*50)
            if key is not None:
                return func(mvals, **kwargs)[key]
            else:
                return func(mvals, **kwargs)
        return func2

    def f2d5p(f, h):
        fpp, fpm, fmp, fmm = [f(i*h, j*h) for i, j in [(1, 1), (1, -1), (-1, 1), (-1, -1)]]
        fpp = (fpp - fpm - fmp + fmm) / (4*h*h)
        return fpp

    def f2d4p(f, h, f0=None):
        if f0 is None:
            fpp, fp0, f0p, f0 = [f(i*h, j*h) for i, j in [(1, 1), (1, 0), (0, 1), (0, 0)]]
        else:
            fpp, fp0, f0p = [f(i*h, j*h) for i, j in [(1, 1), (1, 0), (0, 1)]]
        fpp = (fpp - fp0 - f0p + f0) / (h*h)
        return fpp

    for d in self.objfiles:
        logger.info("\rNow working on " + str(d) + 50*' ' + '\r')
        if wq is None:
            x = self.driver(mvals, d)
        grad = np.zeros(n)
        hdiag = np.zeros(n)
        hess = np.zeros((n, n))
        apath = os.path.join(self.tdir, d, "current")
        x = float(open(os.path.join(apath, 'objective.out')).readlines()[0].split()[1]) * self.factor
        for p in range(self.FF.np):
            if self.callderivs[d][p]:
                def reader(mvals_, h):
                    apath = os.path.join(self.tdir, d, str(p), str(h))
                    answer = float(open(os.path.join(apath, 'objective.out')).readlines()[0].split()[1]) * self.factor
                    return answer
                if AHess:
                    if wq is not None:
                        apath = os.path.join(self.tdir, d, "current")
                        x = float(open(os.path.join(apath, 'objective.out')).readlines()[0].split()[1]) * self.factor
                        grad[p], hdiag[p] = f12d3p(fdwrap(reader, mvals, p, h=self.h), h=self.h, f0=x)
                    else:
                        grad[p], hdiag[p] = f12d3p(fdwrap(self.driver, mvals, p, d=d), h=self.h, f0=x)
                    hess[p, p] = hdiag[p]
                elif AGrad:
                    if self.bidirect:
                        if wq is not None:
                            apath = os.path.join(self.tdir, d, "current")
                            x = float(open(os.path.join(apath, 'objective.out')).readlines()[0].split()[1]) * self.factor
                            grad[p], _ = f12d3p(fdwrap(reader, mvals, p, h=self.h), h=self.h, f0=x)
                        else:
                            grad[p], _ = f12d3p(fdwrap(self.driver, mvals, p, d=d), h=self.h, f0=x)
                    else:
                        if wq is not None:
                            # Since the calculations are submitted as a 3-point finite difference,
                            # this part of the code only reads from half of the completed calculations.
                            grad[p] = f1d2p(fdwrap(reader, mvals, p, h=self.h), h=self.h, f0=x)
                        else:
                            grad[p] = f1d2p(fdwrap(self.driver, mvals, p, d=d), h=self.h, f0=x)
        self.objd[d] = x
        self.gradd[d] = grad
        self.hdiagd[d] = hdiag
        X += x
        G += grad
        # H += np.diag(hdiag)
        H += hess
    if not in_fd():
        self.objective = X
        self.objvals = self.objd
    # print(self.objd)
    # print(self.gradd)
    # print(self.hdiagd)
    if float('Inf') in pvals:
        return {'X': 1e10, 'G': G, 'H': H}
    return {'X': X, 'G': G, 'H': H}

def submit_jobs(self, mvals, AGrad=True, AHess=True):
    # This routine is called by Objective.stage() and runs before "get".
    # It submits the jobs to the Work Queue, and the stage() function waits for the jobs to complete.
    #
    # self.tdir = os.getcwd()
    wq = getWorkQueue()
    if wq is None:
        return

    def submit_psi(this_apath, dname, these_mvals):
        """ Create a grid file and a psi4 input file in the absolute path and submit it to the work queue. """
        cwd = os.getcwd()
        if not os.path.exists(this_apath):
            os.makedirs(this_apath)
        os.chdir(this_apath)
        self.FF.make(these_mvals)
        o = wopen('objective.dat')
        for line in self.objfiles[d]:
            s = line.split()
            if len(s) > 2 and s[0] == 'path' and s[1] == '=':
                print("path = '%s'" % os.getcwd(), file=o)
            elif len(s) > 2 and s[0] == 'set' and s[1] == 'objective_path':
                print("opath = '%s'" % os.getcwd(), file=o)
                print("set objective_path $opath", file=o)
            else:
                print(line, end=' ', file=o)
        o.close()
        os.system("rm -f objective.out")
        if wq is None:
            logger.info("There is no Work Queue!!!\n")
            sys.exit()
        else:
            input_files = [(os.path.join(this_apath, i), i) for i in glob.glob("*")]
            input_files += [(os.path.join(self.root, self.tgtdir, dname, "build.dat"), "build.dat")]
            input_files += [(os.path.join(os.path.split(__file__)[0], "data", "run_psi_rdvr3_objective.sh"), "run_psi_rdvr3_objective.sh")]
            logger.info("\r")
            queue_up_src_dest(wq, "sh run_psi_rdvr3_objective.sh -c %s &> run_psi_rdvr3_objective.log" % os.path.join(self.root, self.tgtdir, dname),
                              input_files=input_files,
                              output_files=[(os.path.join(this_apath, i), i) for i in ["run_psi_rdvr3_objective.log", "output.dat"]],
                              verbose=False)
        os.chdir(cwd)

    for d in self.objfiles:
        logger.info("\rNow working on " + str(d) + 50*' ' + '\r')
        odir = os.path.join(os.getcwd(), d)
        # if os.path.exists(odir):
        #     shutil.rmtree(odir)
        if not os.path.exists(odir):
            os.makedirs(odir)
        apath = os.path.join(odir, "current")
        submit_psi(apath, d, mvals)
        for p in range(self.FF.np):
            def subjob(mvals_, h):
                apath = os.path.join(odir, str(p), str(h))
                submit_psi(apath, d, mvals_)
                # logger.info("Will set up a job for %s, parameter %i\n" % (d, p))
                return 0.0
            if self.callderivs[d][p]:
                if AHess:
                    f12d3p(fdwrap(subjob, mvals, p, h=self.h), h=self.h, f0=0.0)
                elif AGrad:
                    if self.bidirect:
                        f12d3p(fdwrap(subjob, mvals, p, h=self.h), h=self.h, f0=0.0)
                    else:
                        f1d2p(fdwrap(subjob, mvals, p, h=self.h), h=self.h, f0=0.0)

def submit_jobs(self, mvals, AGrad=True, AHess=True):
    # This routine is called by Objective.stage() and runs before "get".
    # It submits the jobs to the Work Queue, and the stage() function waits for the jobs to complete.
    #
    # self.tdir = os.getcwd()
    wq = getWorkQueue()
    if wq is None:
        return

    def submit_psi(this_apath, mname, these_mvals):
        """ Create a grid file and a psi4 input file in the absolute path and submit it to the work queue. """
        cwd = os.getcwd()
        if not os.path.exists(this_apath):
            os.makedirs(this_apath)
        os.chdir(this_apath)
        self.FF.make(these_mvals)
        o = wopen('objective.dat')
        for line in self.objfiles[d]:
            s = line.split()
            if len(s) > 2 and s[0] == 'path' and s[1] == '=':
                print("path = '%s'" % os.getcwd(), file=o)
            elif len(s) > 2 and s[0] == 'set' and s[1] == 'objective_path':
                print("opath = '%s'" % os.getcwd(), file=o)
                print("set objective_path $opath", file=o)
            else:
                print(line, end=' ', file=o)
        o.close()
        os.system("rm -f objective.out")
        if wq is None:
            logger.info("There is no Work Queue!!!\n")
            sys.exit()
        else:
            input_files = [(os.path.join(this_apath, i), i) for i in glob.glob("*")]
            # input_files += [(os.path.join(self.tgtdir, d, "build.dat"), "build.dat")]
            input_files += [(os.path.join(os.path.split(__file__)[0], "data", "run_psi_rdvr3_objective.sh"), "run_psi_rdvr3_objective.sh")]
            logger.info("\r")
            queue_up_src_dest(wq, "sh run_psi_rdvr3_objective.sh %s &> run_psi_rdvr3_objective.log" % mname,
                              input_files=input_files,
                              output_files=[(os.path.join(this_apath, i), i) for i in ["run_psi_rdvr3_objective.log", "output.dat"]],
                              verbose=False)
        os.chdir(cwd)

    for d in self.objfiles:
        logger.info("\rNow working on " + str(d) + 50*' ' + '\r')
        odir = os.path.join(os.getcwd(), d)
        # if os.path.exists(odir):
        #     shutil.rmtree(odir)
        if not os.path.exists(odir):
            os.makedirs(odir)
        apath = os.path.join(odir, "current")
        submit_psi(apath, d, mvals)
        for p in range(self.FF.np):
            def subjob(mvals_, h):
                apath = os.path.join(odir, str(p), str(h))
                submit_psi(apath, d, mvals_)
                # logger.info("Will set up a job for %s, parameter %i\n" % (d, p))
                return 0.0
            if self.callderivs[d][p]:
                if AHess:
                    f12d3p(fdwrap(subjob, mvals, p, h=self.h), h=self.h, f0=0.0)
                elif AGrad:
                    if self.bidirect:
                        f12d3p(fdwrap(subjob, mvals, p, h=self.h), h=self.h, f0=0.0)
                    else:
                        f1d2p(fdwrap(subjob, mvals, p, h=self.h), h=self.h, f0=0.0)

def calc_wq_new(self, coords, dirname):
    # Run TeraChem through the Work Queue.
    wq = getWorkQueue()
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    scrdir = os.path.join(dirname, 'scr')
    if not os.path.exists(scrdir):
        os.makedirs(scrdir)
    guesses = []
    have_guess = False
    unrestricted = self.tcin['method'][0] == 'u'
    if unrestricted:
        guessfnms = ['ca0', 'cb0']
    else:
        guessfnms = ['c0']
    for f in ['c0', 'ca0', 'cb0']:
        if f not in guessfnms:
            continue
        if os.path.exists(os.path.join(dirname, 'scr', f)):
            shutil.move(os.path.join(dirname, 'scr', f), os.path.join(dirname, f))
            guesses.append(f)
        if os.path.exists(os.path.join(dirname, f)):
            if f not in guesses:
                guesses.append(f)
    # Check if all the appropriate guess files have been found
    # and moved to "dirname".
    have_guess = (guesses == guessfnms)
    # This is for when we start geometry optimizations
    # and we have a guess prepped and ready to go.
    if not have_guess and 'guess' in self.tcin:
        for f in self.tcin['guess'].split():
            if os.path.exists(f):
                shutil.copy2(f, dirname)
                guesses.append(f)
                have_guess = True
            else:
                del self.tcin['guess']
                have_guess = False
                break
    self.tcin['coordinates'] = 'start.xyz'
    self.tcin['run'] = 'gradient'
    # For queueing up jobs, delete the GPU key and let the worker decide.
    self.tcin['gpus'] = None
    if have_guess:
        self.tcin['guess'] = ' '.join(guesses)
        self.tcin['purify'] = 'no'
        self.tcin['mixguess'] = "0.0"
    tcopts = edit_tcin(fout="%s/run.in" % dirname, options=self.tcin)
    # Convert coordinates back to the xyz file.
    self.M.xyzs[0] = coords.reshape(-1, 3) * 0.529177
    self.M[0].write(os.path.join(dirname, 'start.xyz'))
    in_files = [('%s/run.in' % dirname, 'run.in'), ('%s/start.xyz' % dirname, 'start.xyz')]
    out_files = [('%s/run.out' % dirname, 'run.out')]
    if have_guess:
        for g in guesses:
            in_files.append((os.path.join(dirname, g), g))
    for g in guessfnms:
        out_files.append((os.path.join(dirname, 'scr', g), os.path.join('scr', g)))
    queue_up_src_dest(wq, "%s/runtc run.in &> run.out" % rootdir, in_files, out_files, verbose=False)

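For context, a minimal driver-side sketch of using calc_wq_new is shown below. It assumes getWorkQueue() and wq_wait() behave as in the functions earlier in this section; `engine`, `coords`, and the 'run_tmp' directory name are placeholders.

# Minimal usage sketch (assumptions: `engine` exposes calc_wq_new() as above,
# `coords` is a flat coordinate array, and getWorkQueue()/wq_wait() are the same
# helpers used throughout this section).
wq = getWorkQueue()
engine.calc_wq_new(coords, dirname='run_tmp')  # stage inputs and queue the task
wq_wait(wq)                                    # block until all queued tasks finish
# After wq_wait() returns, the declared output files (e.g. run_tmp/run.out and
# run_tmp/scr/c0) have been fetched back and can be parsed locally.
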