def wf_creator_complex(x):
    """
    Testing a custom workflow of six fireworks with complex dependencies, and
    optimization in the middle.

    This "complex" Workflow has the form:

                    fw0
                   /   \
                 fw1   fw2
                   \   /
                    fw3 (optimization)
                     |
                    fw4
                     |
                    fw5
    """
    spec = {'_x': x}
    fw0 = Firework(AdditionTask(), spec={"input_array": [1, 2]}, name='Parent')
    fw1 = Firework(AdditionTask(), spec={"input_array": [2, 3]}, name='Child A')
    fw2 = Firework(AdditionTask(), spec={"input_array": [3, 4]}, name='Child B')

    bt = BasicTestTask()
    ot = OptTask(**db_info)
    fw3 = Firework([bt, ot], spec=spec, name="Optimization")

    fw4 = Firework(AdditionTask(), spec={"input_array": [5, 6]}, name='After 1')
    fw5 = Firework(ScriptTask.from_str('echo "ScriptTask: Finished complex '
                                       'workflow w/ optimization."'),
                   name='After 2')

    return Workflow([fw0, fw1, fw2, fw3, fw4, fw5],
                    {fw0: [fw1, fw2], fw1: [fw3], fw2: [fw3], fw3: [fw4],
                     fw4: [fw5], fw5: []})
def task(self, fw_spec):
    self['defuse_bad_rc'] = True
    # Execute the macroscopic code by calling the function in the base class
    ret = ScriptTask.run_task(self, fw_spec)
    self.handleReturnCode(ret.stored_data['returncode'])
def main():
    # set up the SwarmPad and reset it
    swarmpad = SwarmPad()
    swarmpad.reset('', require_password=False)

    firetask1 = ScriptTask.from_str('echo "starting"; sleep 30; echo "ending"')
    firetask2 = ScriptTask.from_str('echo "hello from BACKGROUND thread #1"')
    firetask3 = ScriptTask.from_str('echo "hello from BACKGROUND thread #2"')

    # Combine firetask1 and firetask2 in parallel to form a ParallelTask
    par_task = ParallelTask.from_firetasks([firetask1, firetask2])

    # Combine the par_task and firetask3 sequentially in a single Firework
    firework = Firework([par_task, firetask3])

    # store the workflow and launch it locally
    swarmpad.add_wf(firework)
    rapidfire(swarmpad, FWorker())
def run_task(self, fw_spec):
    job_info_array = fw_spec['_job_info']
    prev_job_info = job_info_array[-1]
    path2setup = prev_job_info['launch_dir']

    for file in glob.glob(path2setup + '/*'):
        shutil.copy(file, '.')

    # ST = ScriptTask.from_str('jobex -ri -c 200 > jobex.out')
    ST = ScriptTask.from_str('touch foo')
    Action = ST.run_task(fw_spec)
    return Action
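The task above relies on fw_spec['_job_info'], which FireWorks only populates when the parent Firework sets '_pass_job_info' in its spec. A minimal wiring sketch; SetupTask is a hypothetical stand-in for the parent's task and CopyPrevDirTask for the FireTask defined above:

from fireworks import Firework, Workflow

# The parent must pass its job info (including 'launch_dir') to its children.
parent = Firework(SetupTask(), spec={'_pass_job_info': True}, name='setup')
# The child runs the run_task shown above, copying files from the parent's launch_dir.
child = Firework(CopyPrevDirTask(), parents=[parent], name='continue')
wf = Workflow([parent, child])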
def dummy_workflow():
    """Dummy FireWorks workflow: a single Firework consisting of three tasks."""
    # create the Firework consisting of multiple tasks
    firetask1 = TemplateWriterTask({'context': {'opt1': 5.0, 'opt2': 'fast method'},
                                    'template_file': 'simple_template.txt',
                                    'output_file': 'inputs.txt'})
    firetask2 = ScriptTask.from_str('wc -w < inputs.txt > words.txt')
    firetask3 = FileTransferTask({'files': [{'src': 'words.txt',
                                             'dest': '~/words.txt'}],
                                  'mode': 'copy'})
    wf = Firework([firetask1, firetask2, firetask3])
    return wf
def TMsetup(self, parent):
    # set up TM
    setup0 = FileTransferTask({'files': ['../' + self.fxyz, '../' + self.fprot],
                               'dest': '.',
                               'mode': 'copy'})
    setup1 = ScriptTask.from_str(x2t(self.fxyz))            # x2t
    setup2 = ScriptTask.from_str(define(self.fprot))        # define
    setup3 = ScriptTask.from_str(info(self.wfname, 'TM_setup'))
    setup = Firework([setup0, setup1, setup2, setup3],
                     name='TM_setup',
                     spec={"_pass_job_info": True, '_fworker': 'cmcc_front'},
                     parents=[parent])
    return setup
def FFsetup(self, parent):
    # set up FF
    FF1 = CopyTask()
    FF2 = ScriptTask.from_str(info(self.wfname, 'FF_setup'))
    FF3 = ScriptTask.from_str('xyz2txyz -i final.xyz -o %s > /dev/null 2> xyz2txyz.err'
                              % self.ftxyz)
    FF4 = ScriptTask.from_str('atomtyper -c %s -o %s > /dev/null 2> atomtyper.err'
                              % (self.ftxyz, self.ftxyz))
    FF5 = ScriptTask.from_str('create_key -i %s -o %s -r %s > key.out 2> key.err'
                              % (self.ftxyz, self.fkey, self.wfname))
    FF = Firework([FF1, FF2, FF3, FF4, FF5],
                  name='FF_setup',
                  parents=[parent],
                  spec={"_pass_job_info": True, '_fworker': 'cmcc_front'})
    return FF
def extract(self, parent):
    # extract info
    extract1 = ExtractTask(fxyz=self.fxyz, fref=self.wfname)
    extract2 = ScriptTask.from_str(info(self.wfname, 'extract'))
    extract = Firework([extract1, extract2],
                       name='extract',
                       parents=[parent],
                       spec={"_pass_job_info": True, '_fworker': 'cmcc_front'})
    return extract
def run_task(self, fw_spec):
    job_info_array = fw_spec['_job_info']
    prev_job_info = job_info_array[-1]
    path2setup = prev_job_info['launch_dir']
    # print path2setup

    shutil.copy(path2setup + '/' + self["fxyz"], ".")

    ST = ScriptTask.from_str('create_ref -c ' + self["fxyz"] +
                             ' -r ' + self["fref"] +
                             ' -p ' + path2setup +
                             ' > create_ref.out 2> create_ref.err')
    Action = ST.run_task(fw_spec)

    shutil.copy(path2setup + '/final.xyz', ".")
    return Action
def wf_creator_complex(x, launchpad):
    """
    Testing a custom workflow of six fireworks with complex dependencies, and
    optimization in the middle.

    This "complex" Workflow has the form:

                    fw0
                   /   \
                 fw1   fw2
                   \   /
                    fw3 (optimization)
                     |
                    fw4
                     |
                    fw5
    """
    spec = {'_x_opt': x}
    dims = [(1, 10), (10.0, 20.0), ['blue', 'green', 'red', 'orange']]
    fw0 = Firework(AdditionTask(), spec={"input_array": [1, 2]}, name='Parent')
    fw1 = Firework(AdditionTask(), spec={"input_array": [2, 3]}, name='Child A')
    fw2 = Firework(AdditionTask(), spec={"input_array": [3, 4]}, name='Child B')

    bt = BasicTestTask()
    ot = OptTask(wf_creator='rocketsled.tests.tests.wf_creator_complex',
                 dimensions=dims,
                 lpad=launchpad,
                 wf_creator_args=[launchpad],
                 opt_label='test_complex')
    fw3 = Firework([bt, ot], spec=spec, name="Optimization")

    fw4 = Firework(AdditionTask(), spec={"input_array": [5, 6]}, name='After 1')
    fw5 = Firework(ScriptTask.from_str('echo "ScriptTask: Finished complex '
                                       'workflow w/ optimization."'),
                   name='After 2')

    return Workflow([fw0, fw1, fw2, fw3, fw4, fw5],
                    {fw0: [fw1, fw2], fw1: [fw3], fw2: [fw3], fw3: [fw4],
                     fw4: [fw5], fw5: []})
def wf_creator_complex(x):
    """
    Testing a custom workflow of six fireworks with complex dependencies, and
    optimization in the middle.

    This "complex" Workflow has the form:

                    fw0
                   /   \
                 fw1   fw2
                   \   /
                    fw3 (optimization)
                     |
                    fw4
                     |
                    fw5
    """
    spec = {"_x": x}
    fw0 = Firework(AdditionTask(), spec={"input_array": [1, 2]}, name="Parent")
    fw1 = Firework(AdditionTask(), spec={"input_array": [2, 3]}, name="Child A")
    fw2 = Firework(AdditionTask(), spec={"input_array": [3, 4]}, name="Child B")

    bt = BasicTestTask()
    ot = OptTask(**db_info)
    fw3 = Firework([bt, ot], spec=spec, name="Optimization")

    fw4 = Firework(AdditionTask(), spec={"input_array": [5, 6]}, name="After 1")
    fw5 = Firework(
        ScriptTask.from_str('echo "ScriptTask: Finished complex '
                            'workflow w/ optimization."'),
        name="After 2",
    )

    return Workflow(
        [fw0, fw1, fw2, fw3, fw4, fw5],
        {fw0: [fw1, fw2], fw1: [fw3], fw2: [fw3], fw3: [fw4], fw4: [fw5], fw5: []},
    )
def test_set_queue_adapter(self):
    # test fw_name_constraint
    fw1 = Firework([ScriptTask(script=None)], fw_id=-1, name="Firsttask")
    fw2 = Firework([ScriptTask(script=None)], parents=[fw1], fw_id=-2, name="Secondtask")
    fw3 = Firework([ScriptTask(script=None)], parents=[fw1], fw_id=-3, name="Thirdtask")

    wf = Workflow([fw1, fw2, fw3])
    wf = set_queue_adapter(wf, {"test": {"test": 1}}, fw_name_constraint="Secondtask")
    self.assertDictEqual(wf.id_fw[-1].spec, {})
    self.assertDictEqual(wf.id_fw[-2].spec, {"_queueadapter": {"test": {"test": 1}}})
    self.assertDictEqual(wf.id_fw[-3].spec, {})

    # test task_name_constraint
    fw1 = Firework([ScriptTask(script=None)], fw_id=-1, name="Firsttask")
    fw2 = Firework(
        [ScriptTask(script=None), ModifiedScriptTask(script=None)],
        parents=[fw1],
        fw_id=-2,
        name="Secondtask",
    )
    fw3 = Firework([ScriptTask(script=None)], parents=[fw1], fw_id=-3, name="Thirdtask")

    wf = Workflow([fw1, fw2, fw3])
    wf = set_queue_adapter(wf, {"test": {"test": 1}},
                           task_name_constraint="ModifiedScriptTask")
    self.assertDictEqual(wf.id_fw[-1].spec, {})
    self.assertDictEqual(wf.id_fw[-2].spec, {"_queueadapter": {"test": {"test": 1}}})
    self.assertDictEqual(wf.id_fw[-3].spec, {})
def run_task(self, fw_spec):
    if self["message"] == "next":
        print("The message was next!")
        print(self["message"])
        fw = Firework([
            ScriptTask.from_str("echo next"),
            MiddleTask(message="other",
                       fw_action=self.get("fw_action", FWAction()))
        ])
        return FWAction(additions=fw)
    else:
        print("The message was Something Else!")
        print(self["message"])
        print()
        print(self.get("fw_action", FWAction()))
        print()
        return FWAction.from_dict(self.get("fw_action", {}))
def setUp(self):
    self.mol_file = 'SiC_0.cif'
    self.config_file = os.path.join(test_dir, 'vasp_interface_defaults.yaml')
    self.input = VaspInputInterface(s=self.mol_file,
                                    config_file=self.config_file)
    self.misc_task = ScriptTask.from_str("echo 'Hello World!'")
def main(sequencing_directory, library_prefix, num_libraries, raw_data_dir):
    lpad = LaunchPad(**yaml.load(open("my_launchpad.yaml")))
    workflow_fireworks = []
    workflow_dependencies = collections.defaultdict(list)

    library_dirs = [
        os.path.join(sequencing_directory, library_prefix + str(i + 1))
        for i in xrange(num_libraries)
    ]
    subdirs = ['unzipped', 'trimmed', 'aligned', 'pythonized', 'sorted']

    for library_dir in library_dirs:
        seq_functions.make_directories(library_dir, subdirs)

        name = "Gunzip_%s" % os.path.basename(library_dir)
        fw_gunzip = Firework(
            [
                ScriptTask(script="find " + os.path.join(library_dir, raw_data_dir) +
                                  " -name '*.gz' -print0 | xargs -0 gunzip"),
                ScriptTask(script="mv " + os.path.join(library_dir, raw_data_dir) +
                                  "/*.fastq " + os.path.join(library_dir, "unzipped")),
            ],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_gunzip)

        name = "Trim_%s" % os.path.basename(library_dir)
        fw_trim = Firework(
            [TrimTask(library_path=library_dir,
                      unzipped_name="unzipped",
                      trimmed_name="trimmed")],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_trim)
        workflow_dependencies[fw_gunzip].append(fw_trim)

        name = "Align_%s" % os.path.basename(library_dir)
        fw_align = Firework(
            [AlignTask(library_path=library_dir,
                       trimmed_name="trimmed",
                       aligned_name="aligned")],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_align)
        workflow_dependencies[fw_trim].append(fw_align)

        name = "Sort_%s" % os.path.basename(library_dir)
        fw_sort = Firework(
            [SortTask(library_path=library_dir,
                      aligned_name="aligned",
                      bammed_name="bammed",
                      sorted_name="sorted")],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_sort)
        workflow_dependencies[fw_align].append(fw_sort)

        name = "Count_%s" % os.path.basename(library_dir)
        fw_count = Firework(
            [CountTask(library_path=library_dir,
                       aligned_name="aligned",
                       bammed_name="bammed",
                       counted_name="counted")],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_count)
        workflow_dependencies[fw_sort].append(fw_count)

    lpad.add_wf(Workflow(workflow_fireworks, links_dict=workflow_dependencies))
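For reference, a hypothetical invocation of the driver above; the directory, prefix, and subdirectory names are illustrative, not taken from the original source:

if __name__ == "__main__":
    # Processes /data/run_01/Library_1 .. Library_4, each of which holds its
    # raw gzipped FASTQ files in a 'raw' subdirectory.
    main(sequencing_directory="/data/run_01",
         library_prefix="Library_",
         num_libraries=4,
         raw_data_dir="raw")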
from fireworks import Firework, LaunchPad, ScriptTask
from fireworks.core.rocket_launcher import launch_rocket, rapidfire

# set up the LaunchPad and reset it
launchpad = LaunchPad(strm_lvl='WARNING')  # set the minimum message level to WARNING
launchpad.reset('', require_password=False)

# create the Firework consisting of a single task
firetask = ScriptTask.from_str(
    'cd /projects/development/LDRDSANS/fireworks/localtest; ./test_cluster.sh')
firework = Firework(firetask)

fw_yaml = firework.to_file("my_firework.yaml")  # save to a yaml file, and get the string representation
fw_json = firework.to_file("my_firework.json")  # save to a json file, and get the string representation

# store workflow and launch it locally
launchpad.add_wf(firework)
launch_rocket(launchpad)  # same as "rlaunch singleshot"
# rapidfire(launchpad, FWorker(), strm_lvl='WARNING')  # same as "rlaunch rapidfire"

# loading from file:
# any class in FireWorks that subclasses FWSerializable has methods from_file and to_file
# firework = Firework.from_file("fw_test.yaml")
# firework = Firework.from_file("fw_test.json")
""" This code is described in the Introductory tutorial, https://materialsproject.github.io/fireworks/introduction.html """ from fireworks import Firework, LaunchPad, ScriptTask from fireworks.core.rocket_launcher import launch_rocket if __name__ == "__main__": # set up the LaunchPad and reset it launchpad = LaunchPad() # launchpad.reset('', require_password=False) # create the Firework consisting of a single task firetask = ScriptTask.from_str('echo "howdy, your job launched successfully!"') firework = Firework(firetask) # store workflow and launch it locally launchpad.add_wf(firework) launch_rocket(launchpad)
""" This code is described in the Dynamic Workflow tutorial, https://materialsproject.github.io/fireworks/dynamic_wf_tutorial.html """ from fireworks import ScriptTask from fireworks.core.firework import Firework, Workflow from fireworks.core.launchpad import LaunchPad from fireworks.core.rocket_launcher import rapidfire from fw_tutorials.dynamic_wf.printjob_task import PrintJobTask if __name__ == "__main__": # set up the LaunchPad and reset it launchpad = LaunchPad() # launchpad.reset('', require_password=False) # create the Workflow that passes job info fw1 = Firework([ScriptTask.from_str('echo "This is the first FireWork"')], spec={"_pass_job_info": True}, fw_id=1) fw2 = Firework([PrintJobTask()], parents=[fw1], fw_id=2) wf = Workflow([fw1, fw2]) # store workflow and launch it locally launchpad.add_wf(wf) rapidfire(launchpad)
def make_md_workflow(sim, archive, stages, md_engine='gromacs',
                     md_category='md', local_category='local',
                     postrun_wf=None, post_wf=None, files=None):
    """Construct a general, single MD simulation workflow.

    Assumptions
    -----------
    The queue launcher submission script must define and export the following
    environment variables:

        1. STAGING : absolute path on resource to staging directory
        2. SCRATCH : absolute path on resource to scratch directory

    The staging directory must already exist on all resources specified in
    ``stages``.

    The script ``run_md.sh`` must be somewhere on your path, and must take a
    single argument giving the directory to execute MD out of. It should
    create and change the working directory to that directory before anything
    else.

    Parameters
    ----------
    sim : str
        MDSynthesis Sim.
    archive : str
        Absolute path to directory to launch from, which holds all required
        files for running MD.
    stages : list, str
        Dicts giving, for each stage, the following keys:
            - 'server': server host to transfer to
            - 'user': username to authenticate with
            - 'staging': absolute path to staging area on remote resource
        Alternatively, a path to a yaml file giving a list of dictionaries
        with the same information.
    md_engine : {'gromacs'}
        MD engine name; needed to determine continuation mechanism to use.
    md_category : str
        Category to use for the MD Firework. Used to target the correct
        rockets.
    local_category : str
        Category to use for non-MD Fireworks, which should be run by rockets
        where the ``archive`` directory is accessible.
    postrun_wf : Workflow
        Workflow to perform after each copyback; performed in parallel to
        continuation run.
    post_wf : Workflow
        Workflow to perform after completed MD (no continuation); use for
        final postprocessing.
    files : list
        Names of files (not paths) needed for each leg of the simulation.
        Need not exist, but if they do they will get staged before each run.

    Returns
    -------
    workflow
        MD workflow; can be submitted to LaunchPad of choice.

    """
    sim = mds.Sim(sim)

    # TODO: perhaps move to its own FireTask?
    sim.categories['md_status'] = 'running'

    # TODO: the trouble with this is that if this workflow is created with the
    # intent of being attached to another, these files may not exist at all yet
    f_exist = [f for f in files if os.path.exists(os.path.join(archive, f))]

    if isinstance(stages, string_types):
        with open(stages, 'r') as f:
            stages = yaml.load(f)

    ## Stage files on all resources where MD may run; takes place locally
    fts_stage = list()
    for stage in stages:
        fts_stage.append(
            FileTransferTask(mode='rtransfer',
                             server=stage['server'],
                             user=stage['user'],
                             files=[os.path.join(archive, i) for i in files],
                             dest=os.path.join(stage['staging'], sim.uuid),
                             max_retry=5,
                             shell_interpret=True))

    fw_stage = Firework(fts_stage,
                        spec={'_launch_dir': archive,
                              '_category': local_category},
                        name='staging')

    ## MD execution; takes place in queue context of compute resource

    # make rundir
    ft_mkdir = MkRunDirTask(uuid=sim.uuid)

    # copy input files to scratch space
    ft_copy = FileTransferTask(
        mode='copy',
        files=[os.path.join('${STAGING}/', sim.uuid, i) for i in files],
        dest=os.path.join('${SCRATCHDIR}/', sim.uuid),
        ignore_missing=True,
        shell_interpret=True)

    # next, run MD
    ft_md = ScriptTask(script='run_md.sh {}'.format(
                           os.path.join('${SCRATCHDIR}/', sim.uuid)),
                       use_shell=True,
                       fizzle_bad_rc=True)

    # send info on where files live to pull firework
    ft_info = BeaconTask(uuid=sim.uuid)

    fw_md = Firework([ft_mkdir, ft_copy, ft_md, ft_info],
                     spec={'_category': md_category},
                     name='md')

    ## Pull files back to archive; takes place locally
    ft_copyback = FilePullTask(dest=archive)

    fw_copyback = Firework([ft_copyback],
                           spec={'_launch_dir': archive,
                                 '_category': local_category},
                           name='pull')

    ## Decide if we need to continue and submit new workflow if so; takes
    ## place locally
    if md_engine == 'gromacs':
        ft_continue = GromacsContinueTask(sim=sim,
                                          archive=archive,
                                          stages=stages,
                                          md_engine=md_engine,
                                          md_category=md_category,
                                          local_category=local_category,
                                          postrun_wf=postrun_wf,
                                          post_wf=post_wf,
                                          files=files)
    else:
        raise ValueError("No known md engine `{}`.".format(md_engine))

    fw_continue = Firework([ft_continue],
                           spec={'_launch_dir': archive,
                                 '_category': local_category},
                           name='continue')

    wf = Workflow([fw_stage, fw_md, fw_copyback, fw_continue],
                  links_dict={fw_stage: [fw_md],
                              fw_md: [fw_copyback],
                              fw_copyback: [fw_continue]},
                  name='{} | md'.format(sim.name),
                  metadata=dict(sim.categories))

    ## Mix in postrun workflow, if given
    if postrun_wf:
        if isinstance(postrun_wf, dict):
            postrun_wf = Workflow.from_dict(postrun_wf)
        wf.append_wf(Workflow.from_wflow(postrun_wf), [fw_copyback.fw_id])

    return wf
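A minimal usage sketch for the builder above, assuming a default LaunchPad configuration; the paths, stage entries, and file names are illustrative only:

from fireworks import LaunchPad

wf = make_md_workflow(
    sim='/path/to/sim',                      # MDSynthesis Sim directory
    archive='/path/to/archive',              # holds all required input files
    stages=[{'server': 'hpc.example.edu',    # illustrative remote resource
             'user': 'alice',
             'staging': '/global/staging'}],
    files=['md.tpr', 'md.cpt'],              # illustrative input file names
)
LaunchPad.auto_load().add_wf(wf)             # submit to the default LaunchPad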
def __init__(self, final_message):
    super(FirstFirework, self).__init__(
        tasks=[ScriptTask.from_str("echo 'Here we go!'"),
               MiddleTask(message="next",
                          fw_action=FWAction(additions=Firework([
                              ScriptTask.from_str("echo '" + final_message + "'")
                          ])))])
from swarmform import SwarmPad
from fireworks import Firework, ScriptTask, FWorker
from fireworks.core.rocket_launcher import launch_rocket

if __name__ == "__main__":
    # set up the SwarmPad and reset it
    swarmpad = SwarmPad()
    swarmpad.reset('', require_password=False)

    # create the Firework consisting of a single ScriptTask
    firework = Firework(ScriptTask.from_str('echo "hello"'))

    # store workflow
    swarmpad.add_sf(firework)

    # retrieve the SwarmFlow from the SwarmPad
    sf = swarmpad.get_sf_by_id(1)
    sf = swarmpad.get_sf_by_name('Unnamed FW')

    # run the SwarmFlow
    launch_rocket(swarmpad, FWorker())
""" This code is described in the Workflow tutorial, https://materialsproject.github.io/fireworks/workflow_tutorial.html """ from fireworks import Firework, FWorker, LaunchPad, ScriptTask, Workflow from fireworks.core.rocket_launcher import rapidfire if __name__ == "__main__": # set up the LaunchPad and reset it launchpad = LaunchPad() # launchpad.reset('', require_password=False) # define four individual FireWorks used in the Workflow task1 = ScriptTask.from_str('echo "Ingrid is the CEO."') task2 = ScriptTask.from_str('echo "Jill is a manager."') task3 = ScriptTask.from_str('echo "Jack is a manager."') task4 = ScriptTask.from_str('echo "Kip is an intern."') fw1 = Firework(task1) fw2 = Firework(task2) fw3 = Firework(task3) fw4 = Firework(task4) # assemble Workflow from FireWorks and their connections by id workflow = Workflow([fw1, fw2, fw3, fw4], {fw1: [fw2, fw3], fw2: [fw4], fw3: [fw4]}) # store workflow and launch it locally launchpad.add_wf(workflow) rapidfire(launchpad, FWorker())
)

wf_fws.append(fw_fit_level_1)
wf_links[fw_init_raw_data].append(fw_fit_level_1)

# Unfit KB compression
fw_raw_data_compression = None
if COMPRESS_OUTPUT:
    fw_name = "ScriptTask_compression_raw_data"

    if VERBOSE_QUEUE:
        print "Queueing {}".format(fw_name)

    fw_raw_data_compression = Firework(
        ScriptTask(
            script = "bzip2 -v " + os.path.join(KB_DIRECTORY, filename_raw_data)
            ),
        name = fw_name,
        spec = {"_queueadapter": {"job_name": fw_name}, "_priority": 0}
        )

    wf_fws.append(fw_raw_data_compression)
    wf_links[fw_fit_level_1].append(fw_raw_data_compression)

# Fit Level 1 KB compression
fw_sim_data_1_compression = None
if COMPRESS_OUTPUT:
    fw_name = "ScriptTask_compression_sim_data_1"

    if VERBOSE_QUEUE:
def run_task(self, fw_spec):
    """
    Args:
        fw_spec:

    Returns:
        FWAction
    """
    # Extract the parameters into variables; this makes for cleaner code IMO
    directory = self["directory"]
    in_custodian = self.get("in_custodian", False)
    number_nodes = self.get("number_nodes", None)
    tolerance = self.get("tolerance", PulayTask.pulay_tolerance)
    fw_action = self.get('fw_action', {})

    # Check if the lattice vectors have changed significantly
    initial_structure = Structure.from_file(
        os.path.join(directory, "POSCAR"))
    final_structure = Structure.from_file(
        os.path.join(directory, "CONTCAR"))

    sum_differences = np.linalg.norm(initial_structure.lattice.matrix -
                                     final_structure.lattice.matrix)

    # If the difference is small, return an empty FWAction
    if sum_differences < tolerance:
        return FWAction.from_dict(fw_action)

    # Else, set up another geometry optimization
    else:
        print("Lattice vectors have changed significantly during geometry "
              "optimization. Performing another full geometry optimization to "
              "make sure there were no Pulay stresses present.\n\n")

        # Create the ScriptTask that copies the CONTCAR to the POSCAR
        copy_contcar = ScriptTask.from_str(
            "cp " + os.path.join(directory, "CONTCAR") +
            " " + os.path.join(directory, "POSCAR"))

        # Create the PyTask that runs the calculation
        if in_custodian:
            vasprun = CustodianTask(directory=directory)
        else:
            vasprun = VaspTask(directory=directory)

        # Create the PyTask that checks the Pulay stresses again
        pulay_task = PulayTask(directory=directory,
                               in_custodian=in_custodian,
                               number_nodes=number_nodes,
                               tolerance=tolerance,
                               fw_action=fw_action)

        # Add number of nodes to spec, or "none"
        firework_spec = {"_launch_dir": os.getcwd()}
        if number_nodes is None:
            firework_spec.update({"_category": "none"})
        else:
            firework_spec.update({"_category": str(number_nodes) + "nodes"})

        # Combine the FireTasks into one FireWork
        relax_firework = Firework(tasks=[copy_contcar, vasprun, pulay_task],
                                  name="Pulay Step",
                                  spec=firework_spec)

        return FWAction(additions=relax_firework)
""" This code is described in the Workflow tutorial, https://materialsproject.github.io/fireworks/workflow_tutorial.html """ from fireworks import Firework, Workflow, FWorker, LaunchPad, ScriptTask from fireworks.core.rocket_launcher import rapidfire if __name__ == "__main__": # set up the LaunchPad and reset it launchpad = LaunchPad() # launchpad.reset('', require_password=False) # define four individual FireWorks used in the Workflow task1 = ScriptTask.from_str('echo "Ingrid is the CEO."') task2 = ScriptTask.from_str('echo "Jill is a manager."') task3 = ScriptTask.from_str('echo "Jack is a manager."') task4 = ScriptTask.from_str('echo "Kip is an intern."') fw1 = Firework(task1) fw2 = Firework(task2) fw3 = Firework(task3) fw4 = Firework(task4) # assemble Workflow from FireWorks and their connections by id workflow = Workflow([fw1, fw2, fw3, fw4], {fw1: [fw2, fw3], fw2: [fw4], fw3: [fw4]}) # store workflow and launch it locally launchpad.add_wf(workflow) rapidfire(launchpad, FWorker())
def run_task(self, fw_spec):
    directory = self.get("directory", os.getcwd())
    custodian = self.get("custodian", False)
    condition = self.get("condition", "energy") or "energy"
    tolerance = self.get(
        "tolerance", PulayTask.pulay_tolerance_dict[condition]
    ) or PulayTask.pulay_tolerance_dict[condition]

    perform_pulay_step = False

    if condition == "ionic_steps":
        vasprun = Vasprun(os.path.join(directory, "vasprun.xml"))
        if vasprun.nionic_steps > tolerance:
            print("Number of ionic steps of geometry optimization is more "
                  "than specified tolerance (" + str(tolerance) +
                  "). Performing another geometry optimization.")
            perform_pulay_step = True

    elif condition == "energy":
        vasprun = Vasprun(os.path.join(directory, "vasprun.xml"))
        ionic_energies = [step['e_wo_entrp'] for step in vasprun.ionic_steps]
        structure = vasprun.final_structure
        if abs(ionic_energies[-1] - ionic_energies[0]) / len(structure) \
                > tolerance:
            print("Difference in energy per atom between first ionic step and "
                  "final ionic step is larger than specified tolerance (" +
                  str(tolerance) + "). Performing another geometry "
                  "optimization.")
            perform_pulay_step = True

    elif condition == "lattice":
        # Check if the lattice vectors have changed significantly
        initial_structure = Structure.from_file(
            os.path.join(directory, "POSCAR")
        )
        final_structure = Structure.from_file(
            os.path.join(directory, "CONTCAR")
        )

        sum_differences = np.linalg.norm(
            initial_structure.lattice.matrix - final_structure.lattice.matrix
        )
        if sum_differences > tolerance:
            print("Lattice vectors have changed significantly during geometry "
                  "optimization. Performing another full geometry optimization "
                  "to make sure there were no Pulay stresses present.\n\n")
            perform_pulay_step = True

    if perform_pulay_step:
        tasks = list()

        # Change to quasi-Newton scheme
        incar = Incar.from_file(os.path.join(directory, "INCAR"))
        incar.update({"IBRION": 1})
        incar.write_file(os.path.join(directory, "INCAR"))

        # Create the ScriptTask that copies the CONTCAR to the POSCAR
        tasks.append(ScriptTask.from_str(
            "cp " + os.path.join(directory, "CONTCAR") +
            " " + os.path.join(directory, "POSCAR")
        ))

        # Run the calculation
        if custodian is True:
            tasks.append(VaspCustodianTask())
        elif isinstance(custodian, list):
            assert all([isinstance(h, ErrorHandler) for h in custodian]), \
                "Not all elements in 'custodian' list are instances of " \
                "the ErrorHandler class!"
            tasks.append(VaspCustodianTask(handlers=custodian))
        else:
            tasks.append(VaspTask())

        # Add the final geometry to the fw_spec of this firework and its children
        tasks.append(AddFinalGeometryToSpec(directory=directory))

        # Create the PyTask that checks the Pulay stresses again
        tasks.append(PulayTask(
            directory=directory,
            custodian=custodian,
            condition=condition,
            tolerance=tolerance
        ))

        # Combine the FireTasks into one FireWork
        optimize_fw = Firework(tasks=tasks, name="Pulay Step", spec=fw_spec)

        return FWAction(detours=optimize_fw)
from fireworks import Firework, Workflow, LaunchPad, ScriptTask
from fireworks.core.rocket_launcher import rapidfire

# set up the LaunchPad and reset it
launchpad = LaunchPad()
launchpad.reset('', require_password=False)

# create the individual FireWorks and Workflow
fw1 = Firework(ScriptTask.from_str('echo "hello"'), name="hello")
fw2 = Firework(ScriptTask.from_str('echo "goodbye"'), name="goodbye",
               parents=[fw1])
wf = Workflow([fw1, fw2], name="test workflow")

# store workflow and launch it locally
launchpad.add_wf(wf)
rapidfire(launchpad)
def add(self, id, command, **kwargs):
    '''Add a task to the dictionary of fireworks.'''
    name = kwargs.pop('name', 'unspecified_task')
    task = ScriptTask.from_str(command)
    firework = Firework(task, name=name, spec=self.spec)
    self.fireworks[id] = firework
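A brief usage sketch of the helper above; the owning container class (here called TaskRegistry) and its spec and fireworks attributes are hypothetical stand-ins for whatever the original class defines:

registry = TaskRegistry()                        # hypothetical owner class
registry.add('prep', 'echo "preparing"', name='prep_task')
registry.add('run', 'echo "running"')            # name defaults to 'unspecified_task'
run_fw = registry.fireworks['run']               # retrieve the stored Firework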
""" This code is described in the Introductory tutorial, http://pythonhosted.org/FireWorks/introduction.html """ from fireworks import Firework, LaunchPad, ScriptTask from fireworks.core.rocket_launcher import launch_rocket if __name__ == "__main__": # set up the LaunchPad and reset it launchpad = LaunchPad() # launchpad.reset('', require_password=False) # create the Firework consisting of a single task firetask = ScriptTask.from_str( 'echo "howdy, your job launched successfully!"') firework = Firework(firetask) # store workflow and launch it locally launchpad.add_wf(firework) launch_rocket(launchpad)
def addRunSteps(self, inpSim, userID, cores):
    # Builds the following workflow for running simulations:
    #  - creates a directory for the simulation using the sim name and uuid
    #  - writes the gkyl input file to the directory
    #  - runs the file in the directory
    #  - if the run is successful, publishes 'Done'; if not, publishes 'Failed'
    #  - plots task
    # userID --> for folder location
    self.fws = []
    print(inpSim)
    self.last = len(self.launchpad.get_fw_ids())
    # for fwork in self.launchpad.get_fw_ids():
    #     self.launchpad.delete_fws([fwork])
    sim = Sim(inpSim)
    # path = '/home/adaniel99/gkylsoft/sims/' + str(userID) + '/' + inpSim + '/'
    path = '/home/dalex_99/gkylsoft/sims/' + str(userID) + '/' + inpSim + '/'

    n = 0
    # for f in os.listdir('/home/adaniel99/gkylsoft/sims/'):
    for f in os.listdir('/home/dalex_99/gkylsoft/sims/'):
        if f == str(userID):
            # for f in os.listdir('/home/adaniel99/gkylsoft/sims/' + str(userID) + '/'):
            for f in os.listdir('/home/dalex_99/gkylsoft/sims/' + str(userID) + '/'):
                if f == inpSim:
                    n = n + 1
                    self.rerun = True

    if n == 0:
        desttask = ScriptTask.from_str('mkdir ' + path)
        writetask = FileWriteTask({'files_to_write': ([{'filename': sim.name(),
                                                        'contents': sim.inpFile()}]),
                                   'dest': path})
        runtask = ScriptTask.from_str('redis-cli PUBLISH ' + User(userID).name() +
                                      '2 "Running Simulation"; mpiexec -n ' + cores +
                                      ' gkyl ' + path + sim.name())
        runFlag = ScriptTask.from_str('redis-cli PUBLISH ' + User(userID).name() +
                                      '2' + ' Done')
        deleteFail = ScriptTask.from_str('lpad defuse_fws -i ' + str(6 + self.last))
        flagFail = ScriptTask.from_str('redis-cli PUBLISH ' + User(userID).name() +
                                       '2' + ' Failed')

        self.ids.clear()
        dest = Firework(desttask, name='Make Folder', fw_id=1 + self.last)
        self.ids.append(1 + self.last)
        write = Firework(writetask, name='Write', fw_id=2 + self.last)
        self.ids.append(2 + self.last)
        run = Firework(runtask, name='Run', fw_id=3 + self.last)
        self.ids.append(3 + self.last)
        flag1 = Firework(runFlag, name='done?', fw_id=4 + self.last)
        self.ids.append(4 + self.last)
        delfail = Firework(deleteFail, name='remove fail flag', fw_id=5 + self.last)
        self.ids.append(5 + self.last)
        failflag = Firework(flagFail, name='fail flag', fw_id=6 + self.last)
        self.ids.append(6 + self.last)

        self.fws.append(dest)
        self.fws.append(write)
        self.fws.append(run)
        self.fws.append(flag1)
        self.fws.append(delfail)
        self.fws.append(failflag)

        wf = Workflow(self.fws,
                      {dest: [write],
                       write: [run],
                       run: [flag1],
                       flag1: [delfail]},
                      name='Running ' + sim.name())
        self.launchpad.add_wf(wf)

    if n == 1:
        writetask = FileWriteTask({'files_to_write': ([{'filename': sim.name(),
                                                        'contents': sim.inpFile()}]),
                                   'dest': path})
        runtask = ScriptTask.from_str('redis-cli PUBLISH ' + User(userID).name() +
                                      '2 "Running Simulation"; mpiexec -n ' + cores +
                                      ' gkyl ' + path + sim.name())
        runFlag = ScriptTask.from_str('redis-cli PUBLISH ' + User(userID).name() +
                                      '2' + ' Done')
        deleteFail = ScriptTask.from_str('lpad defuse_fws -i ' + str(5 + self.last))
        flagFail = ScriptTask.from_str('redis-cli PUBLISH ' + User(userID).name() +
                                       '2' + ' Failed')

        self.ids.clear()
        write = Firework(writetask, name='Write', fw_id=1 + self.last)
        self.ids.append(1 + self.last)
        run = Firework(runtask, name='Run', fw_id=2 + self.last)
        self.ids.append(2 + self.last)
        flag1 = Firework(runFlag, name='done?', fw_id=3 + self.last)
        self.ids.append(3 + self.last)
        delfail = Firework(deleteFail, name='remove fail flag', fw_id=4 + self.last)
        self.ids.append(4 + self.last)
        failflag = Firework(flagFail, name='fail flag', fw_id=5 + self.last)
        self.ids.append(5 + self.last)

        wf = Workflow([write, run, flag1, delfail, failflag],
                      {write: [run],
                       run: [flag1],
                       flag1: [delfail]},
                      name='Running ' + sim.name())
        self.launchpad.add_wf(wf)
parser = ArgumentParser(description="test")
parser.add_argument("--file_list", action="store", required=True)
parser.add_argument("--outdir", action="store", required=False)
parser.add_argument("--script", action="store", help='absolute path to script.py', required=False)
parser.add_argument("--cores", type=int, action="store", default='32', required=False)
parser.add_argument("--brick", action="store", default='2523p355', required=False)
parser.add_argument("--zoom", type=int, action="store", default='1600', required=False)
args = parser.parse_args()

launchpad = LaunchPad(host="mongodb01", name="tractor_fireworks",
                      username="******", password="******")
launchpad.reset('', require_password=False)

fns = read_lines(args.file_list)
for cnt, fn in enumerate(fns):
    cmd = "python sleep_on_it.py %d" % cnt  # (args.script, fn)
    firetask = ScriptTask.from_str(cmd)
    firework = Firework(firetask, name=os.path.basename(fn))
    launchpad.add_wf(firework)
print 'added %d fireworks' % (len(fns))

# os.chdir(os.rundir)
# for i in range(ntasks):
#     outdir = os.path.join(args.outdir, "b%s_zm%d_task%d" % (args.brick, args.zoom, i))
#     #os.removedirs(outdir); os.makedirs(outdir); os.chdir(outdir); bash('ln -s %s legacypipe' % args.rundir)
#     name = "task-" + str(i)
#     script = "python legacypipe/runbrick.py \
#         --zoom 1 %d 1 %d \
#         --force-all --no-write \
#         --pipe \
#         --threads %d \
#         --skip \
#         --skip-calibs \
def wf_evaluate_build(fworker, build_name, dataset_set, pipe_config,
                      include_tests=False, cache=True,
                      kfold_config=KFOLD_DEFAULT, tags=None):
    """
    Current fworkers:
        - "local": Alex's local computer
        - "cori": Cori
        - "lrc": Lawrencium
    """
    check_pipe_config(pipe_config)
    if fworker not in valid_fworkers:
        raise ValueError("fworker must be in {}".format(valid_fworkers))

    # Get a fun unique id for this build
    word_file = "/usr/share/dict/words"
    words = open(word_file).read().splitlines()
    words_short = [w for w in words if 4 <= len(w) <= 6]

    build_id = None
    while LP.db.automatminer_builds.find({"build_id": build_id}).count() != 0 \
            or not build_id:
        build_id = " ".join([w.lower() for w in random.sample(words_short, 2)])
    print("build id: {}".format(build_id))

    all_links = {}
    fws_fold0 = []
    fws_consolidate = []
    benchmark_hashes = []
    for benchmark in dataset_set:
        links, fw_fold0, fw_consolidate = wf_benchmark(fworker,
                                                       pipe_config,
                                                       **benchmark,
                                                       tags=tags,
                                                       kfold_config=kfold_config,
                                                       cache=cache,
                                                       return_fireworks=True,
                                                       build_id=build_id,
                                                       add_dataset_to_names=True)
        all_links.update(links)
        fws_fold0.extend(fw_fold0)
        fws_consolidate.append(fw_consolidate)
        # the benchmark hash is the same between all fws in one benchmark
        benchmark_hashes.append(fw_fold0[0].to_dict()["spec"]["benchmark_hash"])

    fw_build_merge = Firework(ConsolidateBenchmarksToBuild(),
                              spec={"benchmark_hashes": benchmark_hashes,
                                    "build_id": build_id,
                                    "pipe_config": pipe_config,
                                    "build_name": build_name,
                                    "commit": get_last_commit(),
                                    "_fworker": fworker,
                                    "tags": tags},
                              name="build merge ({})".format(build_id))

    for fw in fws_consolidate:
        all_links[fw] = [fw_build_merge]

    if include_tests:
        fw_test = Firework(ScriptTask(script=RUN_TESTS_CMD),
                           name="run tests ({})".format(build_id))
        all_links[fw_test] = fws_fold0
    all_links[fw_build_merge] = []

    wf_name = "build: {} ({}) [{}]".format(build_id, build_name, fworker)
    wf = Workflow(list(all_links.keys()), all_links,
                  name=wf_name,
                  metadata={"build_id": build_id,
                            "tags": tags,
                            "benchmark_hashes": benchmark_hashes})
    return wf
def processfile(runfile):
    """Process a Testopia run YAML file, build a workflow from its test cases,
    and add it to the FireWorks LaunchPad."""
    with open(runfile, 'r') as f:
        run_details = yaml.load(f)
    testcases = run_details['test_run']['cases']
    print 'testcases:\n'
    print testcases
    testcasetype = type(testcases)
    print testcasetype
    run_id = int(run_details['test_run']['run_id'])
    print run_id
    environment_id = int(run_details['test_run']['environment_id'])
    print environment_id

    tcms = Testopia.from_config('/var/dt/tf/etc/testopia.cfg')
    environment_details = tcms.environment_get(environment_id)
    print environment_details
    rundetailsfromtcms = tcms.testrun_get(run_id)
    product_version = rundetailsfromtcms['product_version']
    build_id = rundetailsfromtcms['build_id']
    buildinfo = tcms.build_get(build_id)
    print buildinfo
    build_name = buildinfo['name']
    print "build name: " + build_name
    print "product_version " + product_version
    environment_name = environment_details['name']
    print environment_name

    environment_file = '/var/dt/tf/etc/environments/' + environment_name + '.py'
    environment_filepyc = environment_file + 'c'
    if os.path.isfile(environment_filepyc):
        print "environment pyc file is present, deleting it"
        os.remove(environment_filepyc)
    else:
        print "No cached environment pyc file found"
    print environment_file

    testsonfire = []
    fwsequence = {}
    fwkey = ''
    fwvalue = ''
    for testcase in testcases.keys():
        case_id = int(testcase)
        testcase_name = run_details['test_run']['cases'][testcase]['summary']
        argsf = [run_id, case_id, build_id, environment_id, environment_name,
                 environment_file, testcase_name, product_version, build_name]
        fw_test = Firework(PyTask(func='HookFW.runCase', args=argsf))
        print "argsf are:"
        print argsf
        testsonfire.append(fw_test)
        if fwvalue:
            fwsequence[fwvalue] = fw_test
            fwvalue = fw_test
        else:
            fwvalue = fw_test

    # To be run as the last firework in the workflow: compile logs for the
    # entire set of test cases
    rebotcmd = ("cd /var/dt/tf/logs/" + str(run_id) +
                '; rebot -N "DTTF" -R */*.xml; ln -s report.html index.html; echo ok ')
    fw_test = Firework(ScriptTask.from_str(rebotcmd))
    testsonfire.append(fw_test)
    fwsequence[fwvalue] = fw_test

    print "tests on fire:"
    print testsonfire
    print "test sequence:"
    print fwsequence
    workflow = Workflow(testsonfire, fwsequence)
    launchpad = LaunchPad()
    launchpad.add_wf(workflow)