def do_launch(args):
    """Resolve config-file paths from ``args``, build the LaunchPad /
    FWorker / queue-adapter objects, and launch to the queue.

    Args:
        args: argparse.Namespace from the qlaunch CLI (launchpad_file,
            fworker_file, queueadapter_file, config_dir, loglvl, silencer,
            command, launch_dir, nlaunches, maxjobs_queue, maxjobs_block,
            sleep, reserve).
    """
    # Fall back to files in the config dir for anything not given explicitly.
    if not args.launchpad_file and os.path.exists(
            os.path.join(args.config_dir, 'my_launchpad.yaml')):
        args.launchpad_file = os.path.join(args.config_dir, 'my_launchpad.yaml')
    if not args.fworker_file and os.path.exists(
            os.path.join(args.config_dir, 'my_fworker.yaml')):
        args.fworker_file = os.path.join(args.config_dir, 'my_fworker.yaml')
    if not args.queueadapter_file and os.path.exists(
            os.path.join(args.config_dir, 'my_qadapter.yaml')):
        args.queueadapter_file = os.path.join(args.config_dir, 'my_qadapter.yaml')

    # Build objects; default (localhost) objects when no file was resolved.
    launchpad = LaunchPad.from_file(
        args.launchpad_file) if args.launchpad_file else LaunchPad(
            strm_lvl=args.loglvl)
    fworker = FWorker.from_file(
        args.fworker_file) if args.fworker_file else FWorker()
    queueadapter = load_object_from_file(args.queueadapter_file)
    args.loglvl = 'CRITICAL' if args.silencer else args.loglvl  # -s mutes logs

    if args.command == 'rapidfire':
        rapidfire(launchpad, fworker, queueadapter, args.launch_dir,
                  args.nlaunches, args.maxjobs_queue, args.maxjobs_block,
                  args.sleep, args.reserve, args.loglvl)
    else:
        launch_rocket_to_queue(launchpad, fworker, queueadapter,
                               args.launch_dir, args.reserve, args.loglvl,
                               False)
def parse_wf_for_latt_constants(wf_id):
    """Collect the cubic lattice constant of every completed
    structure-optimization firework in a workflow.

    Args:
        wf_id (int): fw_id of any firework in the workflow to retrieve.

    Returns:
        dict: {reduced_formula: lattice constant a (first of lattice.abc)}

    Raises:
        ValueError: if a firework is not a structure optimization, a formula
            appears twice, or a relaxed cell is not cubic.
    """
    # Use the classmethod directly; ``LaunchPad().from_file(...)`` needlessly
    # instantiated (and connected) a default LaunchPad first.
    lpad = LaunchPad.from_file(lpad_file_path)
    wf = lpad.get_wf_by_fw_id(wf_id)
    lattdata = {}
    print('{} workflow retrieved with {} fws in it'.format(
        wf.name, len(wf.fws)))
    for fw in wf.fws:
        print('\t{}'.format(fw.name))
        if 'structure optimization' not in fw.name:
            raise ValueError("Not a recognized firework!")
        elif fw.state != 'COMPLETED':
            print('\t\tstatus = {}, so skipping'.format(fw.state))
            continue
        # Final relaxed structure from the most recent launch directory.
        pat = fw.launches[-1].launch_dir
        s = Poscar.from_file(os.path.join(pat, 'CONTCAR.relax2.gz')).structure
        nom = s.composition.reduced_formula
        if nom in lattdata:
            raise ValueError("{} already exists in lattdata??".format(nom))
        elif (max(s.lattice.abc) - min(s.lattice.abc)) > 0.00001 or (
                max(s.lattice.angles) - min(s.lattice.angles)) > 0.00001:
            # BUG FIX: the original message had no placeholder, so the
            # lattice passed to .format() was silently dropped.
            raise ValueError(
                "Error occurred with lattice relaxation?? {}".format(
                    s.lattice))
        else:
            lattdata.update({nom: s.lattice.abc[0]})
    print('\nFinalized lattice constant set:\n{}'.format(lattdata))
    return lattdata
def do_launch(args):
    """Resolve config files (explicit arg > config dir > global default),
    build LaunchPad / FWorker / queue-adapter objects and launch to the queue.

    Args:
        args: argparse.Namespace from the qlaunch CLI (also carries timeout,
            fill_mode and fw_id, unlike the older variant of this function).
    """
    # Each file falls back first to the config dir, then to the global default
    # (LAUNCHPAD_LOC / FWORKER_LOC / QUEUEADAPTER_LOC).
    if not args.launchpad_file and os.path.exists(
            os.path.join(args.config_dir, 'my_launchpad.yaml')):
        args.launchpad_file = os.path.join(args.config_dir, 'my_launchpad.yaml')
    elif not args.launchpad_file:
        args.launchpad_file = LAUNCHPAD_LOC

    if not args.fworker_file and os.path.exists(
            os.path.join(args.config_dir, 'my_fworker.yaml')):
        args.fworker_file = os.path.join(args.config_dir, 'my_fworker.yaml')
    elif not args.fworker_file:
        args.fworker_file = FWORKER_LOC

    if not args.queueadapter_file and os.path.exists(
            os.path.join(args.config_dir, 'my_qadapter.yaml')):
        args.queueadapter_file = os.path.join(args.config_dir, 'my_qadapter.yaml')
    elif not args.queueadapter_file:
        args.queueadapter_file = QUEUEADAPTER_LOC

    launchpad = LaunchPad.from_file(
        args.launchpad_file) if args.launchpad_file else LaunchPad(
            strm_lvl=args.loglvl)
    fworker = FWorker.from_file(
        args.fworker_file) if args.fworker_file else FWorker()
    queueadapter = load_object_from_file(args.queueadapter_file)
    args.loglvl = 'CRITICAL' if args.silencer else args.loglvl  # -s mutes logs

    if args.command == 'rapidfire':
        rapidfire(launchpad, fworker=fworker, qadapter=queueadapter,
                  launch_dir=args.launch_dir, nlaunches=args.nlaunches,
                  njobs_queue=args.maxjobs_queue,
                  njobs_block=args.maxjobs_block, sleep_time=args.sleep,
                  reserve=args.reserve, strm_lvl=args.loglvl,
                  timeout=args.timeout, fill_mode=args.fill_mode)
    else:
        launch_rocket_to_queue(launchpad, fworker, queueadapter,
                               args.launch_dir, args.reserve, args.loglvl,
                               False, args.fill_mode, args.fw_id)
def setUpClass(cls):
    """Create a test LaunchPad and FWorker; skip the whole test class when
    no local MongoDB server is reachable."""
    cls.lp = None
    cls.fworker = FWorker()
    try:
        cls.lp = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
        cls.lp.reset(password=None, require_password=False)
    except Exception:
        # Narrowed from a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt; any DB failure just skips the tests.
        raise unittest.SkipTest('MongoDB is not running in localhost:27017! Skipping tests.')
def launchpad():
    """Yield a reset test LaunchPad; drop the test database on teardown."""
    # Register the packages whose user objects must be importable in tests.
    fw_config.USER_PACKAGES = [
        'fireworks.user_objects',
        'fireworks.utilities.tests',
        'fw_tutorials',
        'fireworks.features',
    ]
    test_lp = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
    test_lp.reset(password=None, require_password=False)
    yield test_lp
    # Teardown: remove everything the test wrote.
    test_lp.connection.drop_database(TESTDB_NAME)
def setup_fireworks(cls):
    """
    Sets up the fworker and launchpad if a connection to a local
    mongodb is available.
    cls.lp is set to None if not available
    """
    cls.fworker = FWorker()
    try:
        cls.lp = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
        cls.lp.reset(password=None, require_password=False)
    except Exception:
        # Narrowed from a bare ``except:`` (which also caught
        # KeyboardInterrupt/SystemExit); failure just disables DB tests.
        cls.lp = None
def perturb_wf_setup(perovskite, structure_type='111', Nstruct=100,
                     perturbamnt=None, max_strain=0.06, nimages=8,
                     tags=None):
    """Set up and submit ``Nstruct`` randomly strained/perturbed polarization
    workflows for a perovskite.

    Args:
        perovskite: perovskite object exposing eltA/eltB/eltC, lattconst and
            get_struct_from_structure_type().
        structure_type (str): one of '111', '211', 's2s21', 's2s22'.
        Nstruct (int): number of random perturbations to generate.
        perturbamnt (float): perturbation magnitude; defaults to 4% of the
            lattice constant.
        max_strain (float): maximum random strain.
        nimages (int): number of polarization interpolation images.
        tags (list): optional string tags passed to each workflow.

    Raises:
        ValueError: for an unrecognized structure_type.
    """
    # BUG FIX: ``tags=[]`` was a mutable default argument shared across calls.
    if tags is None:
        tags = []
    if perturbamnt is None:
        perturbamnt = perovskite.lattconst * 0.04

    print(
        "Setting up {} different perturbation polarization approaches\nMax strain = {}, "
        "Perturbation amount = {}".format(Nstruct, max_strain, perturbamnt))

    allowed_struct_type = ['111', '211', 's2s21', 's2s22']
    if structure_type not in allowed_struct_type:
        raise ValueError("{} not in {}".format(structure_type,
                                               allowed_struct_type))

    fws = []
    # Keep the unperturbed reference structure first in the list.
    pert_N_structs = [
        perovskite.get_struct_from_structure_type(structure_type).as_dict()
    ]
    user_incar_settings = {"ADDGRID": True, 'EDIFF': 1e-8, "NELMIN": 6}
    for nind in range(Nstruct):
        sclass = PerfectPerovskite(Asite=perovskite.eltA,
                                   Bsite=perovskite.eltB,
                                   Osite=perovskite.eltC,
                                   lattconst=perovskite.lattconst)
        strain_class = StrainedPerovskite.generate_random_strain(
            sclass, structure_type=structure_type,
            max_strain=max_strain, perturb_amnt=perturbamnt)

        tmp_wf = polarization_wf(strain_class.structure, strain_class.base,
                                 submit=False, nimages=nimages,
                                 user_incar_settings=user_incar_settings,
                                 tags=tags)
        fws.extend(tmp_wf.fws)
        pert_N_structs.append(strain_class.structure.as_dict())

    print("Submitting Polarization workflow with {} fireworks".format(
        len(fws)))
    wf = Workflow(fws)
    # Use the classmethod; LaunchPad().from_file() built a throwaway instance.
    lp = LaunchPad.from_file(lpad_file_path)
    lp.add_wf(wf)
def mlaunch():
    """CLI entry point: launch multiple Rockets in parallel (multiprocessing)."""
    m_description = 'This program launches multiple Rockets simultaneously'
    parser = ArgumentParser(description=m_description)
    parser.add_argument('num_jobs', help='the number of jobs to run in parallel', type=int)
    parser.add_argument('--nlaunches', help='number of FireWorks to run in series per parallel job (int or "infinite"; default 0 is all jobs in DB)', default=0)
    parser.add_argument('--sleep', help='sleep time between loops in infinite launch mode (secs)', default=None, type=int)
    parser.add_argument('-l', '--launchpad_file', help='path to launchpad file', default=LAUNCHPAD_LOC)
    parser.add_argument('-w', '--fworker_file', help='path to fworker file', default=FWORKER_LOC)
    parser.add_argument('-c', '--config_dir', help='path to a directory containing the config file (used if -l, -w unspecified)', default=CONFIG_FILE_DIR)
    parser.add_argument('--loglvl', help='level to print log messages', default='INFO')
    parser.add_argument('-s', '--silencer', help='shortcut to mute log messages', action='store_true')
    parser.add_argument('--nodefile', help='nodefile name or environment variable name containing the node file name (for populating FWData only)', default=None, type=str)
    parser.add_argument('--ppn', help='processors per node (for populating FWData only)', default=1, type=int)

    args = parser.parse_args()

    # Fall back to files in the config dir when -l / -w were not supplied.
    if not args.launchpad_file and args.config_dir and os.path.exists(os.path.join(args.config_dir, 'my_launchpad.yaml')):
        args.launchpad_file = os.path.join(args.config_dir, 'my_launchpad.yaml')
    if not args.fworker_file and args.config_dir and os.path.exists(os.path.join(args.config_dir, 'my_fworker.yaml')):
        args.fworker_file = os.path.join(args.config_dir, 'my_fworker.yaml')

    args.loglvl = 'CRITICAL' if args.silencer else args.loglvl  # -s mutes logs

    launchpad = LaunchPad.from_file(args.launchpad_file) if args.launchpad_file else LaunchPad(strm_lvl=args.loglvl)

    if args.fworker_file:
        fworker = FWorker.from_file(args.fworker_file)
    else:
        fworker = FWorker()

    total_node_list = None
    if args.nodefile:
        # --nodefile may name an environment variable that holds the path.
        if args.nodefile in os.environ:
            args.nodefile = os.environ[args.nodefile]
        with open(args.nodefile, 'r') as f:
            total_node_list = [line.strip() for line in f.readlines()]

    launch_multiprocess(launchpad, fworker, args.loglvl, args.nlaunches,
                        args.num_jobs, args.sleep, total_node_list, args.ppn)
def get_lp(args):
    """Build a LaunchPad from ``args``: explicit file > config-dir file >
    default localhost LaunchPad.

    Args:
        args: argparse.Namespace with launchpad_file, config_dir, silencer,
            logdir and loglvl attributes (loglvl may be mutated here).

    Returns:
        LaunchPad: a connected LaunchPad instance.

    Raises:
        ValueError: when connecting to MongoDB fails, with user guidance.
    """
    try:
        if not args.launchpad_file and os.path.exists(os.path.join(args.config_dir, DEFAULT_LPAD_YAML)):
            args.launchpad_file = os.path.join(args.config_dir, DEFAULT_LPAD_YAML)

        if args.launchpad_file:
            return LaunchPad.from_file(args.launchpad_file)
        else:
            args.loglvl = 'CRITICAL' if args.silencer else args.loglvl
            return LaunchPad(logdir=args.logdir, strm_lvl=args.loglvl)
    except Exception:
        # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit propagate.
        traceback.print_exc()
        err_message = 'FireWorks was not able to connect to MongoDB. Is the server running? The database file specified was {}.'.format(args.launchpad_file)
        if not args.launchpad_file:
            err_message += ' Type "lpad init" if you would like to set up a file that specifies location and credentials of your Mongo database (otherwise use default localhost configuration).'
        raise ValueError(err_message)
def polarization_wf(polar_structure, nonpolar_structure, submit=False,
                    nimages=8, user_incar_settings=None, tags=None):
    """
    :param polar_structure: structure of polar structure
    :param nonpolar_structure: structure of nonpolar structure
    :param submit: boolean for submitting
    :param nimages: number of polarization interpolation images
    :param user_incar_settings: dict of INCAR overrides (defaults to {})
    :param tags: list of string tags
    :return: the Workflow when submit is False, otherwise None
    """
    # BUG FIX: ``user_incar_settings={}`` and ``tags=[]`` were mutable
    # default arguments shared across calls.
    if user_incar_settings is None:
        user_incar_settings = {}
    if tags is None:
        tags = []

    # Species must line up index-by-index between the two endpoints.
    if polar_structure.species != nonpolar_structure.species:
        raise ValueError("WRONG ORDER OF SPECIES: {} vs {}".format(
            polar_structure.species, nonpolar_structure.species))

    vasp_input_set_params = {'user_incar_settings': user_incar_settings}
    wf = get_wf_ferroelectric(
        polar_structure,
        nonpolar_structure,
        vasp_cmd=">>vasp_cmd<<",
        db_file='>>db_file<<',
        vasp_input_set_polar="MPStaticSet",
        vasp_input_set_nonpolar="MPStaticSet",
        relax=False,
        vasp_relax_input_set_polar=vasp_input_set_params,
        vasp_relax_input_set_nonpolar=vasp_input_set_params,
        nimages=nimages,
        hse=False,
        add_analysis_task=True,
        tags=tags)
    print('workflow created with {} fws'.format(len(wf.fws)))

    if submit:
        print("\tSubmitting Polarization workflow")
        # Use the classmethod; the original built a throwaway LaunchPad().
        lp = LaunchPad.from_file(lpad_file_path)
        lp.add_wf(wf)
    else:
        return wf
def rlaunch():
    """CLI entry point: launch one Rocket (singleshot) or loop (rapidfire)."""
    m_description = 'This program launches one or more Rockets. A Rocket grabs a job from the central database and ' \
                    'runs it. The "single-shot" option launches a single Rocket, ' \
                    'whereas the "rapidfire" option loops until all FireWorks are completed.'
    parser = ArgumentParser(description=m_description)
    subparsers = parser.add_subparsers(help='command', dest='command')
    single_parser = subparsers.add_parser('singleshot', help='launch a single Rocket')
    rapid_parser = subparsers.add_parser('rapidfire',
                                         help='launch multiple Rockets (loop until all FireWorks complete)')
    single_parser.add_argument('-f', '--fw_id', help='specific fw_id to run', default=None, type=int)
    single_parser.add_argument('--offline', help='run in offline mode (FW.json required)', action='store_true')
    rapid_parser.add_argument('--nlaunches', help='num_launches (int or "infinite"; default 0 is all jobs in DB)',
                              default=0)
    rapid_parser.add_argument('--sleep', help='sleep time between loops (secs)', default=None, type=int)
    parser.add_argument('-l', '--launchpad_file', help='path to launchpad file', default=LAUNCHPAD_LOC)
    parser.add_argument('-w', '--fworker_file', help='path to fworker file', default=FWORKER_LOC)
    parser.add_argument('-c', '--config_dir', help='path to a directory containing the config file (used if -l, -w unspecified)', default=CONFIG_FILE_DIR)
    parser.add_argument('--loglvl', help='level to print log messages', default='INFO')
    parser.add_argument('-s', '--silencer', help='shortcut to mute log messages', action='store_true')

    args = parser.parse_args()

    # Fall back to config-dir files when -l / -w were not supplied.
    if not args.launchpad_file and os.path.exists(os.path.join(args.config_dir, 'my_launchpad.yaml')):
        args.launchpad_file = os.path.join(args.config_dir, 'my_launchpad.yaml')
    if not args.fworker_file and os.path.exists(os.path.join(args.config_dir, 'my_fworker.yaml')):
        args.fworker_file = os.path.join(args.config_dir, 'my_fworker.yaml')

    args.loglvl = 'CRITICAL' if args.silencer else args.loglvl  # -s mutes logs

    # Offline singleshot runs without any LaunchPad (uses FW.json on disk).
    if args.command == 'singleshot' and args.offline:
        launchpad = None
    else:
        launchpad = LaunchPad.from_file(args.launchpad_file) if args.launchpad_file else LaunchPad(strm_lvl=args.loglvl)

    if args.fworker_file:
        fworker = FWorker.from_file(args.fworker_file)
    else:
        fworker = FWorker()

    if args.command == 'rapidfire':
        rapidfire(launchpad, fworker, None, args.nlaunches, -1, args.sleep, args.loglvl)
    else:
        launch_rocket(launchpad, fworker, args.fw_id, args.loglvl)
def generate_lattconst_wf(list_elt_sets, functional='PBE',
                          vasp_cmd='>>vasp_cmd<<', db_file='>>db_file<<',
                          submit=False, scan_smart_lattice=None):
    """Generates a workflow which calculates lattice constants
    through optimization fireworks for a given functional type.

    NOTE: the SCAN functionality might be reliant on some Custodian
    features from Danny's Custodian.

    Args:
        list_elt_sets: list of [A, B, O] element triples (ABO3 perovskites).
        functional (str): 'PBE', 'LDA' or 'SCAN'.
        vasp_cmd / db_file: env_chk strings passed through to the fireworks.
        submit (bool): when True, add the workflow to the LaunchPad;
            otherwise return it.
        scan_smart_lattice (dict): required for SCAN; maps a Composition to
            a starting lattice constant.

    Returns:
        Workflow when submit is False, otherwise None.

    Raises:
        ValueError: for an unknown functional, or SCAN without
            scan_smart_lattice.
    """
    if functional in ['PBE', 'LDA']:
        job_type = 'double_relaxation_run'
        potcar_type = functional
        incar_settings = {"ADDGRID": True, 'EDIFF': 1e-8}
    elif functional in ['SCAN']:
        job_type = 'metagga_opt_run'
        potcar_type = 'PBE'  # this is the POTCAR that needs to be used for SCAN...
        incar_settings = {'EDIFF': 1e-8, 'ISIF': 7}
        if scan_smart_lattice is None:
            raise ValueError("Need to provide a smarter starting point "
                             "for SCAN lattice constants...")
    else:
        # BUG FIX: an unknown functional previously fell through and caused
        # a NameError on job_type below; fail fast with a clear message.
        raise ValueError("Unrecognized functional: {}".format(functional))

    fws = []
    for elt_set in list_elt_sets:
        if functional == 'SCAN':
            compkey = Composition({
                elt_set[0]: 1,
                elt_set[1]: 1,
                elt_set[2]: 3
            })
            lattconst = scan_smart_lattice[compkey]
        else:
            lattconst = None
        pp = PerfectPerovskite(Asite=elt_set[0], Bsite=elt_set[1],
                               Osite=elt_set[2], lattconst=lattconst)
        s = pp.get_111_struct()

        vis = MPRelaxSet(s, user_incar_settings=incar_settings,
                         potcar_functional=potcar_type)

        fw = OptimizeFW(s, name="{} {} structure optimization".format(
                            s.composition.reduced_formula, functional),
                        vasp_input_set=vis, vasp_cmd=vasp_cmd,
                        db_file=db_file, job_type=job_type,
                        auto_npar=">>auto_npar<<")
        fws.append(fw)

    wf = Workflow(fws, name='{} latt const workflow'.format(functional))
    if submit:
        print('Submitting workflow with {} fws for {}'.format(
            len(list_elt_sets), functional))
        # Use the classmethod; LaunchPad().from_file built a throwaway instance.
        lpad = LaunchPad.from_file(lpad_file_path)
        lpad.add_wf(wf)
    else:
        print('Workflow created with {} fws for {}'.format(
            len(list_elt_sets), functional))
        return wf
def setup():
    """Create and return a freshly reset 'fireworks_test' LaunchPad."""
    test_lp = LaunchPad(name='fireworks_test', strm_lvl='ERROR')
    test_lp.reset('', require_password=False)
    return test_lp
def get_wf_timing(wf_id, returnval=False):
    """Summarize OUTCAR run-time statistics for every firework in a workflow.

    Args:
        wf_id (int): fw_id of any firework in the workflow to retrieve.
        returnval (bool): when True, return the collected timing data.

    Returns:
        None, or {'tot': summed timing dict, 'nonpolar': nonpolar run stats}
        when ``returnval`` is True.

    Raises:
        ValueError: when the nonpolar calc count is not exactly one, or the
            core counts of the OUTCARs disagree.
    """
    # Use the classmethod; LaunchPad().from_file built a throwaway instance.
    lp = LaunchPad.from_file(lpad_file_path)
    wf = lp.get_wf_by_fw_id(wf_id)
    out_run_stats = []
    just_non_polar_stats = []
    for fw in wf.fws:
        ld = fw.launches[-1].launch_dir
        out = None
        if 'OUTCAR' in os.listdir(ld):
            out = Outcar(os.path.join(ld, 'OUTCAR'))
        elif 'OUTCAR.gz' in os.listdir(ld):
            out = Outcar(os.path.join(ld, 'OUTCAR.gz'))
        if out:
            out_run_stats.append(out.run_stats.copy())
            if 'nonpolar_polarization' in fw.name:
                just_non_polar_stats.append(out.run_stats.copy())
        # The polarization step writes into a subdirectory of the launch dir
        # (os.path.join instead of string concatenation with '/').
        ld = os.path.join(ld, 'polarization')
        if os.path.exists(ld):
            out = None
            if 'OUTCAR' in os.listdir(ld):
                out = Outcar(os.path.join(ld, 'OUTCAR'))
            elif 'OUTCAR.gz' in os.listdir(ld):
                out = Outcar(os.path.join(ld, 'OUTCAR.gz'))
            if out:
                out_run_stats.append(out.run_stats.copy())

    cores = out_run_stats[0]['cores']
    print('Workflow {} retrieved {} Outcars all run on {} cores'.format(
        wf.name, len(out_run_stats), cores))
    timestat = {
        k: 0
        for k in [
            'Elapsed time (sec)', 'System time (sec)', 'User time (sec)',
            'Total CPU time used (sec)'
        ]
    }
    print('\nNon-Polar calc (non-polarization) alone took:')
    if len(just_non_polar_stats) != 1:
        raise ValueError("Too many non polar calcs?? = {}".format(
            len(just_non_polar_stats)))
    else:
        for k, v in just_non_polar_stats[0].items():
            if k in timestat:  # membership on the dict, not .keys()
                print("\t{}: {}".format(k, round(v, 2)))
    for out in out_run_stats:
        if out['cores'] != cores:
            # Typo fixed in the error message ("Inconsisten" -> "Inconsistent").
            raise ValueError(
                "Inconsistent number of cores for timing! {} vs {}".format(
                    cores, out['cores']))
        for k, v in out.items():
            if k in timestat:
                timestat[k] += v
    print("\nSummary of TOTAL wf timing:")
    for k, v in timestat.items():
        print("\t{}: {}".format(k, round(v, 2)))
    # Implicit None when returnval is False (same as the original's bare return).
    if returnval:
        return {'tot': timestat, 'nonpolar': just_non_polar_stats}
def setup():
    """Return a reset LaunchPad pointing at the fireworks_test database."""
    lpd = LaunchPad(name="fireworks_test", strm_lvl="ERROR")
    lpd.reset("", require_password=False)
    return lpd
def mlaunch():
    """CLI entry point: launch multiple Rockets simultaneously (multiprocessing),
    with optional timeout and node-exclusion support."""
    m_description = 'This program launches multiple Rockets simultaneously'
    parser = ArgumentParser(description=m_description)
    parser.add_argument('num_jobs', help='the number of jobs to run in parallel', type=int)
    parser.add_argument(
        '--nlaunches',
        help='number of FireWorks to run in series per parallel job '
        '(int or "infinite"; default 0 is all jobs in DB)',
        default=0)
    parser.add_argument(
        '--sleep',
        help='sleep time between loops in infinite launch mode (secs)',
        default=None,
        type=int)
    parser.add_argument(
        '--timeout',
        help='timeout (secs) after which to quit (default None)',
        default=None,
        type=int)
    parser.add_argument('-l', '--launchpad_file', help='path to launchpad file', default=LAUNCHPAD_LOC)
    parser.add_argument('-w', '--fworker_file', help='path to fworker file', default=FWORKER_LOC)
    parser.add_argument('-c', '--config_dir',
                        help='path to a directory containing the config file '
                        '(used if -l, -w unspecified)',
                        default=CONFIG_FILE_DIR)
    parser.add_argument('--loglvl', help='level to print log messages', default='INFO')
    parser.add_argument('-s', '--silencer', help='shortcut to mute log messages', action='store_true')
    parser.add_argument(
        '--nodefile',
        help='nodefile name or environment variable name containing '
        'the node file name (for populating FWData only)',
        default=None,
        type=str)
    parser.add_argument(
        '--ppn',
        help='processors per node (for populating FWData only)',
        default=1,
        type=int)
    parser.add_argument(
        '--exclude_current_node',
        help="Don't use the script launching node as compute node",
        action="store_true")

    try:
        import argcomplete
        argcomplete.autocomplete(parser)
        # This supports bash autocompletion. To enable this, pip install
        # argcomplete, activate global completion, or add
        #   eval "$(register-python-argcomplete mlaunch)"
        # into your .bash_profile or .bashrc
    except ImportError:
        pass

    args = parser.parse_args()

    # Fall back to files in the config dir when -l / -w were not supplied.
    if not args.launchpad_file and args.config_dir and os.path.exists(
            os.path.join(args.config_dir, 'my_launchpad.yaml')):
        args.launchpad_file = os.path.join(args.config_dir, 'my_launchpad.yaml')
    if not args.fworker_file and args.config_dir and os.path.exists(
            os.path.join(args.config_dir, 'my_fworker.yaml')):
        args.fworker_file = os.path.join(args.config_dir, 'my_fworker.yaml')

    args.loglvl = 'CRITICAL' if args.silencer else args.loglvl  # -s mutes logs

    launchpad = LaunchPad.from_file(
        args.launchpad_file) if args.launchpad_file else LaunchPad(
            strm_lvl=args.loglvl)

    if args.fworker_file:
        fworker = FWorker.from_file(args.fworker_file)
    else:
        fworker = FWorker()

    total_node_list = None
    if args.nodefile:
        # --nodefile may name an environment variable that holds the path.
        if args.nodefile in os.environ:
            args.nodefile = os.environ[args.nodefile]
        with open(args.nodefile, 'r') as f:
            total_node_list = [line.strip() for line in f.readlines()]

    launch_multiprocess(launchpad, fworker, args.loglvl, args.nlaunches,
                        args.num_jobs, args.sleep, total_node_list, args.ppn,
                        timeout=args.timeout,
                        exclude_current_node=args.exclude_current_node)
def rlaunch():
    """CLI entry point: launch a single Rocket or rapidfire until all done,
    with timeout/max_loops support and graceful Ctrl-C handling."""
    m_description = 'This program launches one or more Rockets. A Rocket grabs a job from the central database and ' \
                    'runs it. The "single-shot" option launches a single Rocket, ' \
                    'whereas the "rapidfire" option loops until all FireWorks are completed.'
    parser = ArgumentParser(description=m_description)
    subparsers = parser.add_subparsers(help='command', dest='command')
    single_parser = subparsers.add_parser('singleshot', help='launch a single Rocket')
    rapid_parser = subparsers.add_parser(
        'rapidfire',
        help='launch multiple Rockets (loop until all FireWorks complete)')
    single_parser.add_argument('-f', '--fw_id', help='specific fw_id to run', default=None, type=int)
    single_parser.add_argument('--offline', help='run in offline mode (FW.json required)', action='store_true')
    rapid_parser.add_argument(
        '--nlaunches',
        help='num_launches (int or "infinite"; default 0 is all jobs in DB)',
        default=0)
    rapid_parser.add_argument(
        '--timeout',
        help='timeout (secs) after which to quit (default None)',
        default=None,
        type=int)
    rapid_parser.add_argument(
        '--max_loops',
        help=
        'after this many sleep loops, quit even in infinite nlaunches mode (default -1 is infinite loops)',
        default=-1,
        type=int)
    rapid_parser.add_argument('--sleep', help='sleep time between loops (secs)', default=None, type=int)
    parser.add_argument('-l', '--launchpad_file', help='path to launchpad file', default=LAUNCHPAD_LOC)
    parser.add_argument('-w', '--fworker_file', help='path to fworker file', default=FWORKER_LOC)
    parser.add_argument(
        '-c',
        '--config_dir',
        help=
        'path to a directory containing the config file (used if -l, -w unspecified)',
        default=CONFIG_FILE_DIR)
    parser.add_argument('--loglvl', help='level to print log messages', default='INFO')
    parser.add_argument('-s', '--silencer', help='shortcut to mute log messages', action='store_true')

    args = parser.parse_args()
    signal.signal(signal.SIGINT, handle_interrupt)  # graceful exit on ^C

    # Fall back to config-dir files when -l / -w were not supplied.
    if not args.launchpad_file and os.path.exists(
            os.path.join(args.config_dir, 'my_launchpad.yaml')):
        args.launchpad_file = os.path.join(args.config_dir, 'my_launchpad.yaml')
    if not args.fworker_file and os.path.exists(
            os.path.join(args.config_dir, 'my_fworker.yaml')):
        args.fworker_file = os.path.join(args.config_dir, 'my_fworker.yaml')

    args.loglvl = 'CRITICAL' if args.silencer else args.loglvl  # -s mutes logs

    # Offline singleshot runs without any LaunchPad (uses FW.json on disk).
    if args.command == 'singleshot' and args.offline:
        launchpad = None
    else:
        launchpad = LaunchPad.from_file(
            args.launchpad_file) if args.launchpad_file else LaunchPad(
                strm_lvl=args.loglvl)

    if args.fworker_file:
        fworker = FWorker.from_file(args.fworker_file)
    else:
        fworker = FWorker()

    # prime addr lookups
    _log = get_fw_logger("rlaunch", stream_level="INFO")
    _log.info("Hostname/IP lookup (this will take a few seconds)")
    get_my_host()
    get_my_ip()

    if args.command == 'rapidfire':
        rapidfire(launchpad,
                  fworker=fworker,
                  m_dir=None,
                  nlaunches=args.nlaunches,
                  max_loops=args.max_loops,
                  sleep_time=args.sleep,
                  strm_lvl=args.loglvl,
                  timeout=args.timeout)
    else:
        launch_rocket(launchpad, fworker, args.fw_id, args.loglvl)
action='store_true') args = parser.parse_args() if args.command == 'version': print FW_VERSION else: if not args.launchpad_file and os.path.exists('my_launchpad.yaml'): args.launchpad_file = 'my_launchpad.yaml' if args.launchpad_file: lp = LaunchPad.from_file(args.launchpad_file) else: args.loglvl = 'CRITICAL' if args.silencer else args.loglvl lp = LaunchPad(logdir=args.logdir, strm_lvl=args.loglvl) if args.command == 'reset': lp.reset(args.password) elif args.command == 'detect_fizzled': # TODO: report when fixed print lp.detect_fizzled(args.time, args.fix) elif args.command == 'detect_unreserved': # TODO: report when fixed print lp.detect_unreserved(args.time, args.fix) elif args.command == 'add': # TODO: make this cleaner, e.g. make TAR option explicit # fwf = Workflow.from_FireWork(FireWork.from_file(args.wf_file))
""" This code is described in the Dynamic Workflow tutorial, https://materialsproject.github.io/fireworks/dynamic_wf_tutorial.html """ from fireworks import ScriptTask from fireworks.core.firework import Firework, Workflow from fireworks.core.launchpad import LaunchPad from fireworks.core.rocket_launcher import rapidfire from fw_tutorials.dynamic_wf.printjob_task import PrintJobTask if __name__ == "__main__": # set up the LaunchPad and reset it launchpad = LaunchPad() # launchpad.reset('', require_password=False) # create the Workflow that passes job info fw1 = Firework([ScriptTask.from_str('echo "This is the first FireWork"')], spec={"_pass_job_info": True}, fw_id=1) fw2 = Firework([PrintJobTask()], parents=[fw1], fw_id=2) wf = Workflow([fw1, fw2]) # store workflow and launch it locally launchpad.add_wf(wf) rapidfire(launchpad)
def _get_launchpad(self):
    """Populate ``self.launch_pad`` from the yaml config at LAUNCHPAD_LOC.

    Reads the host/port/name/username/password keys from the file.
    """
    # BUG FIX: the original ``yaml.load(open(...))`` leaked the file handle
    # and used a loaderless yaml.load, which is deprecated and can execute
    # arbitrary python tags; safe_load + a context manager fixes both.
    with open(LAUNCHPAD_LOC) as f:
        launch = yaml.safe_load(f)
    self.launch_pad = LaunchPad(host=launch['host'], port=launch['port'],
                                name=launch['name'],
                                username=launch['username'],
                                password=launch['password'])
def arlaunch():
    """
    Function rapid-fire job launching
    """
    m_description = 'This program launches one or more Rockets. A Rocket retrieves a job from the ' \
                    'central database and runs it. The "single-shot" option launches a single Rocket, ' \
                    'whereas the "rapidfire" option loops until all FireWorks are completed.'
    parser = ArgumentParser(description=m_description)
    subparsers = parser.add_subparsers(help='command', dest='command')
    single_parser = subparsers.add_parser('singleshot', help='launch a single Rocket')
    rapid_parser = subparsers.add_parser(
        'rapidfire',
        help='launch multiple Rockets (loop until all FireWorks complete)')
    multi_parser = subparsers.add_parser(
        'multi', help='launches multiple Rockets simultaneously')
    single_parser.add_argument('-f', '--fw_id', help='specific fw_id to run', default=None, type=int)
    single_parser.add_argument('--offline', help='run in offline mode (FW.json required)', action='store_true')
    single_parser.add_argument('--pdb', help='shortcut to invoke debugger on error', action='store_true')
    rapid_parser.add_argument('--nlaunches',
                              help='num_launches (int or "infinite"; '
                              'default 0 is all jobs in DB)',
                              default=0)
    rapid_parser.add_argument(
        '--timeout',
        help='timeout (secs) after which to quit (default None)',
        default=None,
        type=int)
    rapid_parser.add_argument(
        '--max_loops',
        help='after this many sleep loops, quit even in '
        'infinite nlaunches mode (default -1 is infinite loops)',
        default=-1,
        type=int)
    rapid_parser.add_argument('--sleep', help='sleep time between loops (secs)', default=None, type=int)
    rapid_parser.add_argument(
        '--local_redirect',
        help="Redirect stdout and stderr to the launch directory",
        action="store_true")
    multi_parser.add_argument('num_jobs', help='the number of jobs to run in parallel', type=int)
    multi_parser.add_argument('--nlaunches',
                              help='number of FireWorks to run in series per '
                              'parallel job (int or "infinite"; default 0 is '
                              'all jobs in DB)',
                              default=0)
    multi_parser.add_argument(
        '--sleep',
        help='sleep time between loops in infinite launch mode'
        '(secs)',
        default=None,
        type=int)
    multi_parser.add_argument(
        '--timeout',
        help='timeout (secs) after which to quit (default None)',
        default=None,
        type=int)
    multi_parser.add_argument(
        '--nodefile',
        help='nodefile name or environment variable name '
        'containing the node file name (for populating'
        ' FWData only)',
        default=None,
        type=str)
    multi_parser.add_argument(
        '--ppn',
        help='processors per node (for populating FWData only)',
        default=1,
        type=int)
    multi_parser.add_argument('--exclude_current_node',
                              help="Don't use the script launching node"
                              "as compute node",
                              action="store_true")
    multi_parser.add_argument(
        '--local_redirect',
        help="Redirect stdout and stderr to the launch directory",
        action="store_true")
    parser.add_argument('-l', '--launchpad_file', help='path to launchpad file')
    parser.add_argument('-w', '--fworker_file', required=True, help='path to fworker file')
    parser.add_argument('-c', '--config_dir',
                        help='path to a directory containing the config file '
                        '(used if -l, -w unspecified)',
                        default=CONFIG_FILE_DIR)
    parser.add_argument('--loglvl', help='level to print log messages', default='INFO')
    parser.add_argument('-s', '--silencer', help='shortcut to mute log messages', action='store_true')

    try:
        import argcomplete
        argcomplete.autocomplete(parser)
        # This supports bash autocompletion. To enable this, pip install
        # argcomplete, activate global completion, or add
        #   eval "$(register-python-argcomplete rlaunch)"
        # into your .bash_profile or .bashrc
    except ImportError:
        pass

    args = parser.parse_args()
    signal.signal(signal.SIGINT, handle_interrupt)  # graceful exit on ^C

    # Resolve the launchpad file: config dir first, then the global default.
    if not args.launchpad_file and os.path.exists(
            os.path.join(args.config_dir, 'my_launchpad.yaml')):
        args.launchpad_file = os.path.join(args.config_dir, 'my_launchpad.yaml')
    elif not args.launchpad_file:
        args.launchpad_file = LAUNCHPAD_LOC

    args.loglvl = 'CRITICAL' if args.silencer else args.loglvl  # -s mutes logs

    # Offline singleshot runs without a LaunchPad (reads FW.json from disk).
    if args.command == 'singleshot' and args.offline:
        launchpad = None
    else:
        launchpad = LaunchPad.from_file(
            args.launchpad_file) if args.launchpad_file else LaunchPad(
                strm_lvl=args.loglvl)

    fworker = AiiDAFWorker.from_file(args.fworker_file)

    # prime addr lookups
    _log = get_fw_logger("rlaunch", stream_level="INFO")
    _log.info("Hostname/IP lookup (this will take a few seconds)")
    get_my_host()
    get_my_ip()

    if args.command == 'rapidfire':
        rapidfire(launchpad,
                  fworker=fworker,
                  m_dir=None,
                  nlaunches=args.nlaunches,
                  max_loops=args.max_loops,
                  sleep_time=args.sleep,
                  strm_lvl=args.loglvl,
                  timeout=args.timeout,
                  local_redirect=args.local_redirect)
    elif args.command == 'multi':
        total_node_list = None
        if args.nodefile:
            # --nodefile may name an environment variable holding the path
            if args.nodefile in os.environ:
                args.nodefile = os.environ[args.nodefile]
            with open(args.nodefile, 'r') as fhandle:
                total_node_list = [
                    line.strip() for line in fhandle.readlines()
                ]
        launch_multiprocess(launchpad, fworker, args.loglvl, args.nlaunches,
                            args.num_jobs, args.sleep, total_node_list,
                            args.ppn,
                            timeout=args.timeout,
                            exclude_current_node=args.exclude_current_node,
                            local_redirect=args.local_redirect)
    else:
        launch_rocket(launchpad, fworker, args.fw_id, args.loglvl,
                      pdb_on_exception=args.pdb)
def rlaunch():
    """CLI entry point: launch Rockets in singleshot, rapidfire or multi mode."""
    m_description = (
        "This program launches one or more Rockets. A Rocket retrieves a job from the "
        'central database and runs it. The "single-shot" option launches a single Rocket, '
        'whereas the "rapidfire" option loops until all FireWorks are completed.'
    )
    parser = ArgumentParser(description=m_description)
    subparsers = parser.add_subparsers(help="command", dest="command")
    single_parser = subparsers.add_parser("singleshot", help="launch a single Rocket")
    rapid_parser = subparsers.add_parser(
        "rapidfire", help="launch multiple Rockets (loop until all FireWorks complete)")
    multi_parser = subparsers.add_parser(
        "multi", help="launches multiple Rockets simultaneously")
    single_parser.add_argument("-f", "--fw_id", help="specific fw_id to run", default=None, type=int)
    single_parser.add_argument("--offline", help="run in offline mode (FW.json required)", action="store_true")
    single_parser.add_argument("--pdb", help="shortcut to invoke debugger on error", action="store_true")
    rapid_parser.add_argument("--nlaunches",
                              help='num_launches (int or "infinite"; '
                              "default 0 is all jobs in DB)",
                              default=0)
    rapid_parser.add_argument(
        "--timeout", help="timeout (secs) after which to quit (default None)", default=None, type=int)
    rapid_parser.add_argument(
        "--max_loops",
        help=
        "after this many sleep loops, quit even in infinite nlaunches mode (default -1 is infinite loops)",
        default=-1,
        type=int,
    )
    rapid_parser.add_argument("--sleep", help="sleep time between loops (secs)", default=None, type=int)
    rapid_parser.add_argument(
        "--local_redirect", help="Redirect stdout and stderr to the launch directory", action="store_true")
    multi_parser.add_argument("num_jobs", help="the number of jobs to run in parallel", type=int)
    multi_parser.add_argument(
        "--nlaunches",
        help="number of FireWorks to run in series per "
        'parallel job (int or "infinite"; default 0 is '
        "all jobs in DB)",
        default=0,
    )
    multi_parser.add_argument(
        "--sleep", help="sleep time between loops in infinite launch mode (secs)", default=None, type=int)
    multi_parser.add_argument(
        "--timeout", help="timeout (secs) after which to quit (default None)", default=None, type=int)
    multi_parser.add_argument(
        "--nodefile",
        help="nodefile name or environment variable name "
        "containing the node file name (for populating"
        " FWData only)",
        default=None,
        type=str,
    )
    multi_parser.add_argument(
        "--ppn", help="processors per node (for populating FWData only)", default=1, type=int)
    multi_parser.add_argument(
        "--exclude_current_node", help="Don't use the script launching node as compute node", action="store_true")
    multi_parser.add_argument(
        "--local_redirect", help="Redirect stdout and stderr to the launch directory", action="store_true")
    parser.add_argument("-l", "--launchpad_file", help="path to launchpad file")
    parser.add_argument("-w", "--fworker_file", help="path to fworker file")
    parser.add_argument(
        "-c",
        "--config_dir",
        help=
        "path to a directory containing the config file (used if -l, -w unspecified)",
        default=CONFIG_FILE_DIR,
    )
    parser.add_argument("--loglvl", help="level to print log messages", default="INFO")
    parser.add_argument("-s", "--silencer", help="shortcut to mute log messages", action="store_true")

    try:
        import argcomplete
        argcomplete.autocomplete(parser)
        # This supports bash autocompletion. To enable this, pip install
        # argcomplete, activate global completion, or add
        #   eval "$(register-python-argcomplete rlaunch)"
        # into your .bash_profile or .bashrc
    except ImportError:
        pass

    args = parser.parse_args()
    signal.signal(signal.SIGINT, handle_interrupt)  # graceful exit on ^C

    # Resolve config files: config dir first, then the global defaults.
    if not args.launchpad_file and os.path.exists(
            os.path.join(args.config_dir, "my_launchpad.yaml")):
        args.launchpad_file = os.path.join(args.config_dir, "my_launchpad.yaml")
    elif not args.launchpad_file:
        args.launchpad_file = LAUNCHPAD_LOC

    if not args.fworker_file and os.path.exists(
            os.path.join(args.config_dir, "my_fworker.yaml")):
        args.fworker_file = os.path.join(args.config_dir, "my_fworker.yaml")
    elif not args.fworker_file:
        args.fworker_file = FWORKER_LOC

    args.loglvl = "CRITICAL" if args.silencer else args.loglvl  # -s mutes logs

    # Offline singleshot runs without a LaunchPad (reads FW.json from disk).
    if args.command == "singleshot" and args.offline:
        launchpad = None
    else:
        launchpad = LaunchPad.from_file(
            args.launchpad_file) if args.launchpad_file else LaunchPad(
                strm_lvl=args.loglvl)

    if args.fworker_file:
        fworker = FWorker.from_file(args.fworker_file)
    else:
        fworker = FWorker()

    # prime addr lookups
    _log = get_fw_logger("rlaunch", stream_level="INFO")
    _log.info("Hostname/IP lookup (this will take a few seconds)")
    get_my_host()
    get_my_ip()

    if args.command == "rapidfire":
        rapidfire(
            launchpad,
            fworker=fworker,
            m_dir=None,
            nlaunches=args.nlaunches,
            max_loops=args.max_loops,
            sleep_time=args.sleep,
            strm_lvl=args.loglvl,
            timeout=args.timeout,
            local_redirect=args.local_redirect,
        )
    elif args.command == "multi":
        total_node_list = None
        if args.nodefile:
            # --nodefile may name an environment variable holding the path
            if args.nodefile in os.environ:
                args.nodefile = os.environ[args.nodefile]
            with open(args.nodefile) as f:
                total_node_list = [line.strip() for line in f.readlines()]
        launch_multiprocess(
            launchpad,
            fworker,
            args.loglvl,
            args.nlaunches,
            args.num_jobs,
            args.sleep,
            total_node_list,
            args.ppn,
            timeout=args.timeout,
            exclude_current_node=args.exclude_current_node,
            local_redirect=args.local_redirect,
        )
    else:
        launch_rocket(launchpad, fworker, args.fw_id, args.loglvl, pdb_on_exception=args.pdb)