from functional import Functional

# Command-line interface: save/load a pickled job folder, choose the number
# of MPI pools, and optionally point at a directory for PBS output.
parser = OptionParser()
parser.add_option("--save", dest="saveme", help="Pickle job folder.",
                  metavar="FILE", type="str")
parser.add_option("--load", dest="loadme", help="Loads a pickled job folder.",
                  metavar="FILE", type="str")
parser.add_option("--pools", dest="pools", help="Number of mpi pools.",
                  metavar="N", type="int", default=1)
parser.add_option("--pbs", dest="pbs", help="Directory where to write pbs stuff.",
                  metavar="DIRECTORY", type="str")
(options, args) = parser.parse_args()

# Split the world communicator into `pools` groups; ranks are assigned
# round-robin by rank modulo pool count.
local_comm = world.split(world.rank % options.pools)

# Functional instance attached to selected jobs below.
functional = Functional()

# Build a fresh job folder unless we are only loading an existing pickle.
if options.loadme is None or options.saveme is not None:
    job_dictionary = jobs.JobFolder()
    for caps in ["A", "B", "C", "D"]:
        capsjob = job_dictionary / caps
        # NOTE(review): original indentation was lost; assignments below are
        # grouped under the "B" branch per reading order — confirm intent.
        if caps == "B":
            capsjob.vasp = functional
            capsjob.args = "beast", 666
            capsjob["indict"] = "indicted"
def main(system_params=None):
    """Drive an NLEP fitting run over MPI.

    Splits the world communicator into one sub-communicator per job, builds an
    :class:`Objective` for every system, and hands the job communicator to
    ``job_main``.  Ranks that are superfluous for a serial optimizer return
    early (beware: they must still have passed the barriers above).

    :param system_params: optional parameters forwarded to ``setup_systems``;
        defaults to ``None``.
    """
    from boost.mpi import world, broadcast
    from pylada.vasp.extract import Extract, ExtractGW
    from pylada.vasp.specie import U, nlep
    from pylada.vasp import Vasp
    import os
    from os import path
    from .nlep import Objective
    from pylada.vasp.nlep.postprocessing import find_best, load_test, prepare_analog_fit

    print("mpi rank %d of %d" % (world.rank, world.size))

    # Things which need to be put into dictionary of input script.
    # global_dict = { "Extract": Extract, "ExtractGW": ExtractGW, "Vasp": Vasp,
    #                 "nlep": nlep, "U": U }

    # Read file governing run. Other input files should be generic.
    run_input = load_run_input()
    run_control = RunControl(run_input)
    load_from_analogs = run_input.load_from_analogs

    # Create systems.
    from .systems import setup_systems
    systems = setup_systems(run_input, system_params)

    # nbjobs == -1 means "one job per MPI rank".
    if run_input.nbjobs == -1:
        run_control.nbjobs = world.size
    assert world.size >= run_control.nbjobs, "Too few procs granted, or too many jobs requested."

    # Color ranks round-robin into job groups; each job group gets its own
    # communicator, and within a group every rank gets a private vasp comm.
    job_comm = world
    vasp_comm = world
    color = world.rank % run_control.nbjobs
    job_comm = world.split(color)
    vasp_comm = job_comm
    if job_comm.size > 1:
        vasp_comm = job_comm.split(job_comm.rank)

    create_directories(run_control, systems, job_comm, color)
    # Barrier to be sure all necessary directories exist.
    world.barrier()

    # Creates objective functions from systems.
    for s in systems.systems:
        print(s.outdir)
        s.objective = Objective(s.input.vasp, s.input.dft_in, s.input.gw_in,
                                outdir=s.outdir, comm=vasp_comm,
                                units=run_input.units)
        # BUGFIX: the Python 2 builtin `file()` does not exist in Python 3
        # (this module already uses print() calls); use open() instead.
        s.evals_file = open("%s/evals" % s.outdir, "w")
    systems.evals_file = open("%s/evals" % run_control.outdir, "w")

    # Setup correspondence between vector "x" and list of species (ie at
    # least one subpart shared by > 1 systems).
    systems.setup_species()

    # Barrier to be sure all necessary directories exist.
    world.barrier()

    # Only serial vasp so far, but lmmin is a parallel fitter.
    if job_comm.rank != 0 and run_input.optimizer != "lmmin":
        print("rank %d superfluous, returning; beware barrier" % world.rank)
        return

    print("world rank %d job %d local rank %d working dir %s"
          % (world.rank, color, job_comm.rank, run_control.outdir))
    job_main(job_comm, run_input, systems, vasp_comm)
from pylada.opt.changedir import Changedir
from pylada.opt import Redirect

# Pseudopotential input files required by every escan job.
pseudo_files = ["maskr", "vq.Ge", "vq.Si", "vq.SiGe.Ge", "vq.SiGe.Si", "vwr.pso"]
# Directories where jobs are at.
jobs = ["VBM", "Gamma", "X", "L", "W1", "W2"]
testdir = "test_input"
workdir = "work"
# Only the root rank creates the shared work directory.
if world.rank == 0 and (not exists(workdir)):
    makedirs(workdir)

# Creates local comm split into N groups (jobs are distributed round-robin
# over the groups by index modulo N).
N = 2
color = world.rank % N
local_comm = world.split(color)

# Launch escan for different jobs.
for i, dir in enumerate(jobs):
    # Splits job according to color: skip jobs belonging to other groups.
    if i % N != color:
        continue
    # Creates working directory with all input files.
    workhere = join(workdir, dir)
    if local_comm.rank == 0:
        if exists(workhere):
            rmtree(workhere)  # deletes everything if already there.
        makedirs(workhere)
        # Copy potential files into the fresh working directory.
        # BUGFIX(idiom): loop variable renamed from `file`, which shadowed
        # the builtin of the same name.
        testhere = join(testdir, "pseudos")
        for fname in pseudo_files:
            copy(join(testhere, fname), workhere)
    # symlinks input files.