Ejemplo n.º 1
0
 def run_task(self, fw_spec):
     """Return an FWAction that detours to three intermediate script jobs."""
     print('Running the Multiple Detour Task')
     commands = [
         'echo "this is intermediate job 1"',
         'echo "this is intermediate job 2"',
         'echo "this is intermediate job 3"',
     ]
     detours = [Firework(ScriptTask.from_str(cmd)) for cmd in commands]
     return FWAction(detours=detours)
Ejemplo n.º 2
0
    def test_getinterpolatedposcar(self):
        """End-to-end check of GetInterpolatedPOSCAR: interpolate between the
        CONTCARs of two parent fireworks and verify the written POSCAR agrees
        with an independent pymatgen Structure.interpolate computation."""
        nimages = 5
        this_image = 1
        autosort_tol = 0.5

        # fw1/fw2 stage the endpoint CONTCAR files and record their locations.
        fw1 = Firework([CopyVaspOutputs(calc_dir=self.static_outdir,
                                        contcar_to_poscar=False,
                                        additional_files=["CONTCAR"]),
                        PassCalcLocs(name="fw1")], name="fw1")

        fw2 = Firework([CopyVaspOutputs(calc_dir=self.opt_outdir,
                                        contcar_to_poscar=False,
                                        additional_files=["CONTCAR"]),
                        PassCalcLocs(name="fw2")], name="fw2")

        # fw3 interpolates between the structures staged by fw1 and fw2.
        fw3 = Firework([GetInterpolatedPOSCAR(start="fw1",
                                              end="fw2",
                                              this_image=this_image,
                                              nimages=nimages,
                                              autosort_tol=autosort_tol),
                        PassCalcLocs(name="fw3")],
                       name="fw3", parents=[fw1, fw2])
        fw4 = Firework([PassCalcLocs(name="fw4")], name="fw4", parents=fw3)

        wf = Workflow([fw1, fw2, fw3, fw4])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        # Re-fetch fw4 after the run; its spec has accumulated all calc_locs.
        fw4 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw4"})[0])

        calc_locs = fw4.spec["calc_locs"]
        # The task must leave the interpolated POSCAR plus copies of the two
        # endpoint CONTCARs in fw3's launch directory.
        self.assertTrue(os.path.exists(get_calc_loc("fw3", calc_locs)["path"] +
                                       "/POSCAR"))
        self.assertTrue(os.path.exists(get_calc_loc("fw3", calc_locs)["path"] +
                                       "/interpolate/CONTCAR_0"))
        self.assertTrue(os.path.exists(get_calc_loc("fw3", calc_locs)["path"] +
                                       "/interpolate/CONTCAR_1"))

        struct_start = Structure.from_file(get_calc_loc("fw3", calc_locs)["path"] +
                                          "/interpolate/CONTCAR_0")
        struct_end = Structure.from_file(get_calc_loc("fw3", calc_locs)["path"] +
                                         "/interpolate/CONTCAR_1")
        struct_inter = Structure.from_file(get_calc_loc("fw3", calc_locs)["path"] +
                                           "/POSCAR")

        # Recompute the interpolation independently for comparison.
        structs = struct_start.interpolate(struct_end,
                                           nimages,
                                           interpolate_lattices=True,
                                           autosort_tol=autosort_tol)

        # Check x of 1st site.
        self.assertAlmostEqual(structs[this_image][1].coords[0],
                               struct_inter[1].coords[0])
        # Check the first lattice parameter (abc[0] is a, not c as the
        # original comment claimed).
        self.assertAlmostEqual(structs[this_image].lattice.abc[0],
                               struct_inter.lattice.abc[0])
Ejemplo n.º 3
0
 def run_task(self, fw_spec):
     """Generate one add-to-SNL / optimize / db-insert workflow per deformed
     structure derived from the previously relaxed structure, and return them
     as workflow additions.

     Fix: the original `del spec['_dupefinder']` raised KeyError when the
     generated spec carried no dupefinder; `pop` with a default is safe.
     """
     # Read structure from previous relaxation
     relaxed_struct = fw_spec['output']['crystal']
     # Generate deformed structures
     d_struct_set = DeformedStructureSet(relaxed_struct, ns=0.06)
     wf = []
     for i, d_struct in enumerate(d_struct_set.def_structs):
         fws = []
         connections = {}
         f = Composition(d_struct.formula).alphabetical_formula
         snl = StructureNL(d_struct, 'Joseph Montoya <*****@*****.**>', 
                           projects=["Elasticity"])
         # Step 1: register the deformed structure in the SNL database.
         tasks = [AddSNLTask()]
         snl_priority = fw_spec.get('priority', 1)
         spec = {'task_type': 'Add Deformed Struct to SNL database', 
                 'snl': snl.as_dict(), 
                 '_queueadapter': QA_DB, 
                 '_priority': snl_priority}
         if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL):
             spec['force_mpsnl'] = snl.as_dict()
             spec['force_snlgroup_id'] = fw_spec['snlgroup_id']
             del spec['snl']
         fws.append(Firework(tasks, spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-1000+i*10))
         connections[-1000+i*10] = [-999+i*10]
         # Step 2: VASP optimization of the deformed structure.
         spec = snl_to_wf._snl_to_spec(snl, 
                                       parameters={'exact_structure':True})
         spec = update_spec_force_convergence(spec)
         spec['deformation_matrix'] = d_struct_set.deformations[i].tolist()
         spec['original_task_id'] = fw_spec["task_id"]
         spec['_priority'] = fw_spec['_priority']*2
         # Turn off dupefinder for deformed structure (pop is tolerant of a
         # spec that never had one).
         spec.pop('_dupefinder', None)
         spec['task_type'] = "Optimize deformed structure"
         fws.append(Firework([VaspWriterTask(), SetupElastConstTask(),
                              get_custodian_task(spec)], 
                             spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-999+i*10))

         # Step 3: insert the VASP results plus elastic data into the DB.
         priority = fw_spec['_priority']*3
         spec = {'task_type': 'VASP db insertion', 
                 '_priority': priority,
                 '_allow_fizzled_parents': True, 
                 '_queueadapter': QA_DB, 
                 'elastic_constant':"deformed_structure", 
                 'clean_task_doc':True,
                 'deformation_matrix':d_struct_set.deformations[i].tolist(), 
                 'original_task_id':fw_spec["task_id"]}
         fws.append(Firework([VaspToDBTask(), AddElasticDataToDBTask()], spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-998+i*10))
         connections[-999+i*10] = [-998+i*10]
         wf.append(Workflow(fws, connections))
     return FWAction(additions=wf)
Ejemplo n.º 4
0
    def test_offline_fw_passinfo(self):
        """Run two parent AdditionTask FWs offline, recover them, then run the
        child offline and verify it received both parents' outputs."""
        fw1 = Firework([AdditionTask()], {"input_array": [1, 1]}, name="1")
        fw2 = Firework([AdditionTask()], {"input_array": [2, 2]}, name="2")
        fw3 = Firework([AdditionTask()], {"input_array": [3]},
                       parents=[fw1, fw2],
                       name="3")

        wf = Workflow([fw1, fw2, fw3])
        self.lp.add_wf(wf)

        # make dirs for launching jobs
        cur_dir = os.path.dirname(os.path.abspath(__file__))

        os.mkdir(os.path.join(cur_dir, "launcher_1"))
        os.mkdir(os.path.join(cur_dir, "launcher_2"))
        os.mkdir(os.path.join(cur_dir, "launcher_3"))

        # launch two parent jobs
        os.chdir(os.path.join(cur_dir, "launcher_1"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        setup_offline_job(self.lp, fw, launch_id)
        # launchpad=None simulates a disconnected (offline) worker
        launch_rocket(None, self.fworker)

        os.chdir(os.path.join(cur_dir, "launcher_2"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        setup_offline_job(self.lp, fw, launch_id)
        launch_rocket(None, self.fworker)

        # recover jobs
        for l in self.lp.offline_runs.find(
            {
                "completed": False,
                "deprecated": False
            }, {"launch_id": 1}):
            fw = self.lp.recover_offline(l['launch_id'])

        # launch child job
        os.chdir(os.path.join(cur_dir, "launcher_3"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        last_fw_id = fw.fw_id
        setup_offline_job(self.lp, fw, launch_id)
        launch_rocket(None, self.fworker)

        # recover jobs
        for l in self.lp.offline_runs.find(
            {
                "completed": False,
                "deprecated": False
            }, {"launch_id": 1}):
            fw = self.lp.recover_offline(l['launch_id'])

        # confirm the sum in the child job: parents passed 2 and 4, own spec
        # had 3 -> input_array {2, 3, 4}, sum 9
        child_fw = self.lp.get_fw_by_id(last_fw_id)
        self.assertEqual(set(child_fw.spec['input_array']), set([2, 3, 4]))
        self.assertEqual(child_fw.launches[0].action.stored_data["sum"], 9)
Ejemplo n.º 5
0
 def test_multi_detour(self):
     """A detour-spawning FW must wire each detour in front of its child."""
     parent = Firework([MultipleDetourTask()], fw_id=1)
     child = Firework([ScriptTask.from_str('echo "DONE"')], parents=[parent],
                      fw_id=2)
     self.lp.add_wf(Workflow([parent, child]))
     rapidfire(self.lp)
     links = self.lp.get_wf_by_fw_id(1).links
     # fw 1 detours to 3/4/5 (plus original child 2); each detour leads to 2.
     expected = {1: {2, 3, 4, 5}, 2: set(), 3: {2}, 4: {2}, 5: {2}}
     for fw_id, children in expected.items():
         self.assertEqual(set(links[fw_id]), children)
Ejemplo n.º 6
0
    def test_append_wf_detour(self):
        """Appending a workflow as a detour must run before the original child,
        so the child sees spec mods from both its parent and the detour."""
        head = Firework([ModSpecTask()], fw_id=1)
        tail = Firework([ModSpecTask()], fw_id=2, parents=[head])
        self.lp.add_wf(Workflow([head, tail]))

        detour_wf = Workflow([Firework([ModSpecTask()])])
        self.lp.append_wf(detour_wf, [1], detour=True)

        for _ in range(2):
            launch_rocket(self.lp, self.fworker)

        # Both the original parent and the detour appended to 'dummy2'.
        self.assertEqual(self.lp.get_fw_by_id(2).spec['dummy2'], [True, True])
Ejemplo n.º 7
0
    def test_spec_copy(self):
        """Sharing one spec dict between two Fireworks must not cross-link
        their tasks — each FW keeps its own script."""
        shared_spec = {'_category': 'dummy_category'}

        scripts = {1: 'echo "Task 1"', 2: 'echo "Task 2"'}
        fireworks = [
            Firework(ScriptTask.from_str(cmd), fw_id=fw_id,
                     name='Task %d' % fw_id, spec=shared_spec)
            for fw_id, cmd in scripts.items()
        ]
        self.lp.add_wf(Workflow(fireworks))

        for fw_id, cmd in scripts.items():
            stored = self.lp.get_fw_by_id(fw_id).tasks[0]['script'][0]
            self.assertEqual(stored, cmd)
Ejemplo n.º 8
0
def createVaspSRCFireworks(vasp_input_set, task_helper, task_type, control_procedure,
                           custodian_handlers, max_restarts, src_cleaning, task_index, spec,
                           setup_spec_update=None, run_spec_update=None):
    """Build the three Fireworks of a Setup/Run/Control (SRC) VASP trio.

    Returns a dict with the three fireworks ('setup_fw', 'run_fw',
    'control_fw'), the setup->run->control 'links_dict', and the ordered
    'fws' list.
    """
    # Make a full copy of the spec so the caller's dict is never mutated.
    if spec is None:
        spec = {}
    spec = copy.deepcopy(spec)
    spec['_add_launchpad_and_fw_id'] = True
    spec['_add_fworker'] = True
    # Initialize the SRC task_index (fall back to deriving it from task_type).
    if task_index is not None:
        src_task_index = SRCTaskIndex.from_any(task_index)
    else:
        src_task_index = SRCTaskIndex.from_string(task_type)
    spec['SRC_task_index'] = src_task_index

    # SetupTask: short single-core job that prepares the run.
    setup_spec = copy.deepcopy(spec)
    # Remove any initial queue_adapter_update from the spec
    setup_spec.pop('queue_adapter_update', None)

    setup_spec = set_short_single_core_to_spec(setup_spec)
    setup_spec['_preserve_fworker'] = True
    setup_spec['_pass_job_info'] = True
    setup_spec.update({} if setup_spec_update is None else setup_spec_update)
    setup_task = VaspSetupTask(vasp_input_set=vasp_input_set, deps=None, task_helper=task_helper, task_type=task_type)
    setup_fw = Firework(setup_task, spec=setup_spec, name=src_task_index.setup_str)

    # RunTask: the actual VASP run under custodian control.
    run_spec = copy.deepcopy(spec)
    run_spec['SRC_task_index'] = src_task_index
    run_spec['_preserve_fworker'] = True
    run_spec['_pass_job_info'] = True
    run_spec.update({} if run_spec_update is None else run_spec_update)
    run_task = VaspRunTask(control_procedure=control_procedure, task_helper=task_helper, task_type=task_type,
                           custodian_handlers=custodian_handlers)
    run_fw = Firework(run_task, spec=run_spec, name=src_task_index.run_str)

    # ControlTask: inspects the run; _allow_fizzled_parents lets it act even
    # when the run firework failed.
    control_spec = copy.deepcopy(spec)
    control_spec = set_short_single_core_to_spec(control_spec)
    control_spec['SRC_task_index'] = src_task_index
    control_spec['_allow_fizzled_parents'] = True
    control_task = VaspControlTask(control_procedure=control_procedure, manager=None, max_restarts=max_restarts,
                                   src_cleaning=src_cleaning, task_helper=task_helper)
    control_fw = Firework(control_task, spec=control_spec, name=src_task_index.control_str)

    links_dict = {setup_fw.fw_id: [run_fw.fw_id],
                  run_fw.fw_id: [control_fw.fw_id]}
    return {'setup_fw': setup_fw, 'run_fw': run_fw, 'control_fw': control_fw, 'links_dict': links_dict,
            'fws': [setup_fw, run_fw, control_fw]}
Ejemplo n.º 9
0
    def test_copyfilesfromcalcloc(self):
        """CopyFilesFromCalcLoc must copy named files from previous calc
        locations into the current directory with the requested renames."""
        # fw1/fw2 each stage VASP outputs and record their calc locations.
        fw1 = Firework(
            [
                CopyVaspOutputs(calc_dir=self.plain_outdir),
                PassCalcLocs(name="fw1")
            ],
            name="fw1",
        )

        fw2 = Firework(
            [
                CopyVaspOutputs(calc_dir=self.relax2_outdir),
                PassCalcLocs(name="fw2")
            ],
            name="fw2",
        )

        # fw3 pulls POSCAR from both parents, appending _0 / _1 suffixes.
        fw3 = Firework(
            [
                CopyFilesFromCalcLoc(
                    calc_loc="fw1",
                    filenames=["POSCAR"],
                    name_prepend="",
                    name_append="_0",
                ),
                CopyFilesFromCalcLoc(
                    calc_loc="fw2",
                    filenames=["POSCAR"],
                    name_prepend="",
                    name_append="_1",
                ),
                PassCalcLocs(name="fw3"),
            ],
            name="fw3",
            parents=[fw1, fw2],
        )
        fw4 = Firework([PassCalcLocs(name="fw4")], name="fw4", parents=fw3)

        wf = Workflow([fw1, fw2, fw3, fw4])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        # Re-fetch fw4 after the run; its spec accumulates all calc_locs.
        fw4 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw4"})[0])

        calc_locs = fw4.spec["calc_locs"]
        # Both renamed copies must exist in fw3's launch directory.
        self.assertTrue(
            os.path.exists(
                get_calc_loc("fw3", calc_locs)["path"] + "/POSCAR_0"))
        self.assertTrue(
            os.path.exists(
                get_calc_loc("fw3", calc_locs)["path"] + "/POSCAR_1"))
Ejemplo n.º 10
0
    def test_parallel_fibadder(self):
        """Stress-test concurrent updates: four FibonacciAdder FWs share one
        parent and are launched from a process pool so multiple workers update
        the same Workflow at once.

        Fix: the original leaked the multiprocessing Pool; the context manager
        terminates and joins the workers.
        """
        # this is really testing to see if a Workflow can handle multiple FWs updating it at once
        parent = Firework(ScriptTask.from_str("python -c 'print(\"test1\")'", {'store_stdout': True}))
        fib_spec = {'smaller': 0, 'larger': 1, 'stop_point': 30}
        fibs = [Firework(FibonacciAdderTask(), dict(fib_spec), parents=[parent])
                for _ in range(4)]
        wf = Workflow([parent] + fibs)
        self.lp.add_wf(wf)

        creds_array = [self.lp.to_dict()] * NCORES_PARALLEL_TEST
        with Pool(NCORES_PARALLEL_TEST) as p:
            p.map(random_launch, creds_array)
Ejemplo n.º 11
0
    def test_category_pt2(self):
        """Uncategorized FWs are only runnable by workers with no category or
        the special '__none__' category."""
        fw1 = Firework(ScriptTask.from_str('echo "Task 1"'), fw_id=1,
                       name='Task 1')
        fw2 = Firework(ScriptTask.from_str('echo "Task 2"'), fw_id=2,
                       name='Task 2')
        self.lp.add_wf(Workflow([fw1, fw2]))

        # A worker pinned to a specific category sees nothing to run.
        self.assertFalse(self.lp.run_exists(FWorker(category="dummy_category")))
        # '__none__' explicitly matches uncategorized FWs.
        self.assertTrue(self.lp.run_exists(FWorker(category="__none__")))
        # A worker with no category restriction can run anything.
        self.assertTrue(self.lp.run_exists(FWorker()))
        # A list of concrete categories still excludes uncategorized FWs.
        self.assertFalse(self.lp.run_exists(FWorker(category=["dummy_category",
                                                             "other category"])))
Ejemplo n.º 12
0
 def test_org_wf(self):
     """Two chained FWs run in dependency order and store their stdout."""
     scripts = ["python -c 'print(\"test1\")'", "python -c 'print(\"test2\")'"]
     tasks = [ScriptTask.from_str(s, {'store_stdout': True}) for s in scripts]
     fw1 = Firework(tasks[0], fw_id=-1)
     fw2 = Firework(tasks[1], fw_id=-2)
     # -1 is the parent of -2, so launch order is fw1 then fw2.
     self.lp.add_wf(Workflow([fw1, fw2], {-1: -2}))
     for launch_id, expected in ((1, 'test1\n'), (2, 'test2\n')):
         launch_rocket(self.lp, self.fworker)
         stored = self.lp.get_launch_by_id(launch_id).action.stored_data
         self.assertEqual(stored['stdout'], expected)
Ejemplo n.º 13
0
    def test_cleanupfiles_calc_loc(self):
        """DeleteFilesPrevFolder with calc_loc=True must delete only glob
        matches in the previous calc folder, leaving other entries intact."""
        # will test deleting files from previous folder
        fw1 = Firework(
            [
                CreateFolder(folder_name="to_remove.relax0"),
                CreateFolder(folder_name="to_remove.relax1"),
                CreateFolder(folder_name="dont_remove.relax0"),
                CreateFolder(folder_name="shouldnt_touch"),
                PassCalcLocs(name="fw1"),
            ],
            name="fw1",
        )
        # "to_remove*" matches both relax folders; plain "dont_remove" does
        # not match "dont_remove.relax0".
        fw2 = Firework(
            [
                DeleteFilesPrevFolder(files=["to_remove*", "dont_remove"],
                                      calc_loc=True),
                PassCalcLocs(name="fw2"),
            ],
            name="fw2",
            parents=fw1,
        )

        wf = Workflow([fw1, fw2])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        fw2 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw2"})[0])
        calc_locs = fw2.spec["calc_locs"]

        # Non-matching folders survive...
        self.assertTrue(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"],
                    "dont_remove.relax0")))
        self.assertTrue(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"], "shouldnt_touch")))
        # ...while both glob matches are gone.
        self.assertFalse(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"],
                    "to_remove.relax0")))
        self.assertFalse(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"],
                    "to_remove.relax1")))
Ejemplo n.º 14
0
    def test_job_info(self):
        """FWs with '_pass_job_info' must propagate launch metadata
        (_job_info) to their children, accumulating across generations.

        Fix: removed two blocks of dead, commented-out polling/debug code
        that were left behind as bare string literals.
        """
        fw1 = Firework([ScriptTask.from_str('echo "Testing job info"')],
                       spec={"_pass_job_info": True},
                       fw_id=1)
        fw2 = Firework([DummyJobPassTask()],
                       parents=[fw1],
                       spec={
                           "_pass_job_info": True,
                           "target": 1
                       },
                       fw_id=2)
        fw3 = Firework([DummyJobPassTask()],
                       parents=[fw2],
                       spec={"target": 2},
                       fw_id=3)
        self.lp.add_wf(Workflow([fw1, fw2, fw3]))
        launch_rocket(self.lp, self.fworker)

        # After fw1 runs, fw2 (target 1) should carry fw1's job info.
        target_fw_id = self.lp.get_fw_ids({"spec.target": 1})[0]
        modified_spec = self.lp.get_fw_by_id(target_fw_id).spec

        self.assertIsNotNone(modified_spec['_job_info'])
        self.assertIsNotNone(modified_spec['_job_info'][0]["launch_dir"])
        self.assertEqual(modified_spec['_job_info'][0]['name'], 'Unnamed FW')
        self.assertEqual(modified_spec['_job_info'][0]['fw_id'], 1)

        launch_rocket(self.lp, self.fworker)

        # After fw2 runs, fw3 (target 2) accumulates info for both launches.
        target_fw_id = self.lp.get_fw_ids({"spec.target": 2})[0]
        modified_spec = self.lp.get_fw_by_id(target_fw_id).spec

        self.assertEqual(len(modified_spec['_job_info']), 2)
Ejemplo n.º 15
0
    def test_init(self):
        """Workflow construction must reject links that mention unknown fw_ids."""
        fws = [Firework([PyTask(func="print", args=[i])], fw_id=i)
               for i in range(5)]

        # A links dict referencing only known ids is accepted.
        wf = Workflow(fws, links_dict={0: [1, 2, 3], 1: [4], 2: [4]})
        self.assertIsInstance(wf, Workflow)

        # An unknown parent id (100) must raise.
        self.assertRaises(ValueError, Workflow, fws,
                          links_dict={0: [1, 2, 3], 1: [4], 100: [4]})
        # An unknown child id (100) must raise as well.
        self.assertRaises(ValueError, Workflow, fws,
                          links_dict={0: [1, 2, 3], 1: [4], 2: [100]})
Ejemplo n.º 16
0
 def test_archive(self):
     """An archived workflow must never be picked up by a rocket."""
     error_task = PyTask(func="fireworks.tests.mongo_tests.throw_error",
                         args=["This should not happen"])
     fw = Firework(error_task)
     self.lp.add_wf(fw)
     self.lp.archive_wf(fw.fw_id)
     # launch_rocket returns falsy when nothing was available to run.
     self.assertFalse(launch_rocket(self.lp, self.fworker))
Ejemplo n.º 17
0
    def test_tracker(self):
        """Launch a workflow and track the files."""
        self._teardown([self.dest1])
        try:
            # 95 script tasks, each appending one number to the tracked file.
            fts = [
                ScriptTask.from_str(
                    'echo "' + str(i) + '" >> ' + self.dest1,
                    {"store_stdout": True})
                for i in range(5, 100)
            ]
            fw = Firework(fts,
                          spec={"_trackers": [self.tracker1]},
                          fw_id=20,
                          name="test_fw")
            self.lp.add_wf(fw)
            launch_rocket(self.lp, self.fworker)

            # The tracker reports the tail of the file: the last two numbers.
            self.assertEqual("98\n99", self.tracker1.track_file())
        finally:
            self._teardown([self.dest1])
Ejemplo n.º 18
0
    def test_backgroundtask(self):
        """A BackgroundTask attached via '_background_tasks' must run alongside
        the main task and leave its file behind."""
        dest1 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             'hello.txt')
        self._teardown([dest1])
        try:
            main_task = ScriptTask.from_str(
                "python -c 'print(\"testing background...\")'",
                {'store_stdout': True})

            writer = FileWriteTask(
                {'files_to_write': [{'filename': dest1,
                                     'contents': 'hello'}]})
            # Run once, and flush on finish so the file exists afterwards.
            bg_task1 = BackgroundTask(writer, num_launches=1,
                                      run_on_finish=True)

            fw = Firework(main_task, spec={'_background_tasks': [bg_task1]})
            self.lp.add_wf(fw)
            launch_rocket(self.lp, self.fworker)

            with open(dest1) as f:
                self.assertEqual(f.read(), 'hello')
        finally:
            self._teardown([dest1])
Ejemplo n.º 19
0
def counterpoise_correction_generation_fw(molname, charge, spin_multiplicity,
                                          qm_method, fragments,
                                          mission, priority=1,
                                          parent_fwid=None,
                                          additional_user_tags=None,
                                          large=False):
    """Build the counterpoise-correction generation Firework.

    Returns:
        ([fw_cp], links_dict): a single-element firework list and the
        parent_id -> child_id links connecting it to any parents.

    Fixes: the original crashed with its own defaults — it called
    ``user_tags.update(None)`` when no extra tags were supplied, and it
    iterated ``for p_fwid in parent_fwid`` even when parent_fwid was None.
    """
    fw_spec = dict()
    fw_spec["user_tags"] = dict()
    fw_spec["user_tags"]["molname"] = molname
    fw_spec["user_tags"]["mission"] = mission
    fw_spec["qm_method"] = qm_method
    fw_spec["fragments"] = fragments
    fw_spec["charge"] = charge
    fw_spec["spin_multiplicity"] = spin_multiplicity
    fw_spec["large"] = large
    fw_spec["task_type"] = "counterpoise correction generation"
    if priority:
        fw_spec['_priority'] = priority
    if additional_user_tags:
        fw_spec["user_tags"].update(additional_user_tags)
    fwid_base = 1
    if parent_fwid:
        if not isinstance(parent_fwid, (int, list)):
            raise ValueError("Parent FireWork ID must be integer or list")
        parent_fwid = parent_fwid if isinstance(parent_fwid, list) \
            else [parent_fwid]
        # Child ids must be larger than any parent id.
        fwid_base = max(parent_fwid) + 1
    else:
        parent_fwid = []
    current_fwid = fwid_base
    links_dict = dict()
    fw_cp = Firework([CounterpoiseCorrectionGenerationTask()],
                     spec=fw_spec,
                     name=molname + " Counterpoise Correction Generation",
                     fw_id=current_fwid)
    for p_fwid in parent_fwid:
        links_dict[p_fwid] = current_fwid
    return [fw_cp], links_dict
Ejemplo n.º 20
0
def vasp_jobs_scan_and_run(dir, vasp_cmd, label):
    """Scan *dir* for pending VASP runs and submit one Firework per job.

    Args:
        dir: directory to scan.
        vasp_cmd: VASP command executed by subprocess.Popen,
            e.g. ['mpirun', 'vasp_std'] or ['srun', 'vasp_std'].
        label: name given to the submitted workflow.

    Returns:
        The number of jobs submitted.
    """
    fws = []
    for work_dir in init.get_directories_NeedVaspRun(dir):
        # Per-job queue settings (including the job name) come from the dir.
        queue = init.queue_setup(work_dir)
        spec = {
            'vasp_cmd': vasp_cmd,
            '_launch_dir': work_dir,
            '_queueadapter': queue,
            '_fworker': fworker,
        }
        fws.append(Firework([VaspRun()], spec=spec, name=queue['job_name']))
    launchpad.add_wf(Workflow(fws, name=label))
    return len(fws)
Ejemplo n.º 21
0
    def test_basic_fw_offline(self):
        """Reserve a FW, run it fully offline, then recover the result from
        the FW_offline.json / FW_ping.json files written by the rocket.

        Fix: replaced the deprecated ``assertEquals`` alias (removed in
        Python 3.12's unittest) with ``assertEqual``.
        """
        test1 = ScriptTask.from_str("python -c 'print(\"test1\")'",
                                    {'store_stdout': True})
        fw = Firework(test1)
        self.lp.add_wf(fw)

        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())

        setup_offline_job(self.lp, fw, launch_id)

        # launchpad=None: the rocket runs without DB access, writing state to
        # local JSON files instead.
        launch_rocket(None, self.fworker)

        with open(os.path.join(os.getcwd(), "FW_offline.json")) as f:
            fwo = json.load(f)
            self.assertEqual(fwo["state"], "COMPLETED")
            self.assertEqual(fwo["launch_id"], 1)
            self.assertEqual(fwo["fwaction"], {'update_spec': {}, 'mod_spec': [], 'stored_data': {'returncode': 0, 'stdout': u'test1\n', 'all_returncodes': [0]}, 'exit': False, 'detours': [], 'additions': [], 'defuse_children': False, 'defuse_workflow': False})

        with open(os.path.join(os.getcwd(), "FW_ping.json")) as f:
            fwp = json.load(f)
            self.assertIsNotNone(fwp["ping_time"])

        # Recovering the offline run syncs the local result back to the DB.
        l = self.lp.offline_runs.find_one({"completed": False, "deprecated": False}, {"launch_id": 1})
        self.lp.recover_offline(l['launch_id'])
        self.assertEqual(self.lp.get_launch_by_id(1).action.stored_data['stdout'], 'test1\n')
Ejemplo n.º 22
0
    def test_copy(self):
        """Test that we can produce a copy of a Workflow but that the copy
        has unique fw_ids.

        """
        originals = []
        for idx in range(5):
            originals.append(
                Firework([PyTask(func="print", args=[idx])],
                         fw_id=idx, name=idx))

        source_wf = Workflow(originals,
                             links_dict={0: [1, 2, 3], 1: [4], 2: [4]})
        wf_copy = Workflow.from_wflow(source_wf)

        # The copy gets fresh fw_ids, but names are retained, so map each
        # copied FW back to its original via its name and compare children.
        for copied_fw in wf_copy.fws:
            copied_children = wf_copy.links.get(copied_fw.fw_id, list())
            original_children = source_wf.links.get(copied_fw.name, list())
            for copy_child, orig_child in zip(copied_children,
                                              original_children):
                self.assertEqual(orig_child, wf_copy.id_fw[copy_child].name)
Ejemplo n.º 23
0
 def create_autoparal_fw(cls, firetask, folder=None):
     """Wrap *firetask* in an AutoparalFireTask and return a short,
     single-core Firework for the autoparal pre-run."""
     spec = {'_queueadapter': {'ntasks': 1, 'walltime': '00:10:00'}}
     if folder:
         # Run the autoparal step next to (not inside) the real run folder.
         spec['_launch_dir'] = folder + '_autoparal'
     return Firework(AutoparalFireTask(firetask), spec=spec)
Ejemplo n.º 24
0
    def conclude_task(self, fw_spec):
        """
        Conclude the relaxation and forward the updated structure to the
        following tasks/FWs. If the dilatmx target has not been reached yet,
        detour to a fresh RelaxFWTask restarted from scratch instead.
        """
        action = super(RelaxFWTask, self).conclude_task(fw_spec)

        actual_dilatmx = self.abitask.get_inpvar('dilatmx', 1.)
        if self.target_dilatmx and self.target_dilatmx < actual_dilatmx:
            # Not converged on dilatmx yet: tighten it and restart.
            self.abitask.reduce_dilatmx(target=self.target_dilatmx)
            self.abitask.reset_from_scratch()
            # Ignore the previous dependencies, since we are restarting with a different structure.
            # is_autoparal=False: autoparal is not re-run for the restart —
            # NOTE(review): original comment was truncated ("could have bee");
            # intent presumably relates to reusing optimal parameters. Confirm.
            restart_task = RelaxFWTask(self.abitask,
                                       deps={},
                                       handlers=self.handlers,
                                       target_dilatmx=self.target_dilatmx,
                                       is_autoparal=False,
                                       dep_id=self.dep_id)
            # Detour: run the restart before any children, carrying over the
            # spec minus the task list.
            action.detours.append(
                Firework(
                    restart_task,
                    spec={k: v
                          for k, v in fw_spec.items() if k != '_tasks'}))
            logger.info(
                'Converging dilatmx. Value reduce from {} to {}. New FW created.'
                .format(actual_dilatmx, self.abitask.get_inpvar('dilatmx')))
        else:
            # FIXME here the code assumes that 'abi_dept' is already in the update_spec
            action.update_spec['abi_deps'].update({
                'struct_' + str(self.dep_id):
                self.abitask.get_final_structure().as_dict()
            })

        return action
Ejemplo n.º 25
0
 def test_fizzle(self):
     """A task that raises must FIZZLE its FW and leave nothing runnable."""
     failing = PyTask(func="fireworks.tests.mongo_tests.throw_error",
                      args=["Testing; this error is normal."])
     self.lp.add_wf(Firework(failing))
     # The launch itself happens (truthy return)...
     self.assertTrue(launch_rocket(self.lp, self.fworker))
     # ...but the FW ends up FIZZLED and no further run is available.
     self.assertEqual(self.lp.get_fw_by_id(1).state, 'FIZZLED')
     self.assertFalse(launch_rocket(self.lp, self.fworker))
Ejemplo n.º 26
0
    def task_analysis(self, fw_spec):
        """
        A relax task updates and forwards an updated structure for the following tasks/FWs.
        If the status is Unconverged does not raise an exception, but creates a new FW.
        Previous dependencies are not forwarded to the new FW
        """

        # Raise an exception if the final status is not Unconverged or OK.
        if self.abitask.status < self.abitask.S_UNCONVERGED or self.abitask.status == self.abitask.S_ERROR:
            raise AbinitRuntimeError(self)

        if self.abitask.status == self.abitask.S_UNCONVERGED:
            # Unconverged: detour into a continuation FW, budget permitting.
            stored_data = {'history': list(self.abitask.history)}
            if self.additional_steps <= 1:
                # Step budget exhausted — give up rather than loop forever.
                raise AbinitRuntimeError(self)
            new_task = self.copy()
            # Continue from the last structure, depending only on this WFK.
            new_task.structure = self.abitask.read_final_structure()
            new_task.deps = self.parse_deps({self: "WFK"})
            new_task.additional_steps = self.additional_steps - 1
            new_spec = {'abi_deps': {'dep_' + str(self.task_id): os.getcwd()}}
            # Carry over queue settings and derive a fresh launch dir.
            if '_queueadapter' in fw_spec:
                new_spec['_queueadapter'] = fw_spec.get('_queueadapter')
            if '_launch_dir' in fw_spec:
                new_spec['_launch_dir'] = self.new_workdir(
                    fw_spec['_launch_dir'])
            new_step = Firework(new_task, spec=new_spec)
            return FWAction(stored_data=stored_data, detours=new_step)
        else:
            # Converged (OK): fall back to the standard analysis.
            return super(MultiStepRelaxStrategyFireTask,
                         self).task_analysis(fw_spec)
Ejemplo n.º 27
0
    def test_tracker_failed_fw(self):
        """
        Add a bad firetask to workflow and test the tracking: the tracker must
        still report the file tail up to the point of failure.

        Fix: narrowed the bare ``except:`` (which also swallowed
        SystemExit/KeyboardInterrupt) to ``except Exception:``.
        """
        self._teardown([self.dest1])
        try:
            # Tasks 5..49 append to the file, then one broken task
            # ('cat 4' on a nonexistent file), then tasks 51..99 that
            # should never run.
            fts = [ScriptTask.from_str('echo "' + str(i) + '" >> ' + self.dest1,
                                       {'store_stdout': True})
                   for i in range(5, 50)]
            fts.append(ScriptTask.from_str('cat 4 >> ' + self.dest1))
            fts.extend(ScriptTask.from_str('echo "' + str(i) + '" >> ' + self.dest1,
                                           {'store_stdout': True})
                       for i in range(51, 100))

            fw = Firework(fts, spec={'_trackers': [self.tracker1]}, fw_id=21, name='test_fw')
            self.lp.add_wf(fw)

            try:
                print("===========================================")
                print("Bad rocket launched. The failure below is OK")
                print("===========================================")
                launch_rocket(self.lp, self.fworker)
            except Exception:
                # The failure is expected; the assertions below are the test.
                pass

            # Only tasks up to 49 ran, so the tracked tail is 48/49.
            self.assertEqual('48\n49', self.tracker1.track_file())
        finally:
            self._teardown([self.dest1])
Ejemplo n.º 28
0
 def test_force_lock_removal(self):
     """A second WFLock with kill=True must be able to take over a lock that
     is already held."""
     task = ScriptTask.from_str("python -c 'print(\"test1\")'",
                                {'store_stdout': True})
     self.lp.add_wf(Firework(task, {"_dupefinder": DupeFinderExact()},
                             fw_id=1))
     # Hold the lock manually, then re-acquire it with forced expiry/removal.
     with WFLock(self.lp, 1):
         with WFLock(self.lp, 1, kill=True, expire_secs=1):
             self.assertTrue(True)  # reaching this line means the takeover worked
Ejemplo n.º 29
0
    def __init__(self, structure, pseudos):
        """Build a one-Firework workflow running the first (SCF) dataset of an
        electron-bands input for *structure*."""
        scf_input = ebands_input(structure, pseudos).split_datasets()[0]
        self.scf_fw = Firework(AbiFireTask(scf_input))
        self.wf = Workflow([self.scf_fw])
Ejemplo n.º 30
0
 def test_basic_fw(self):
     """A single ScriptTask FW runs and stores its stdout."""
     script = ScriptTask.from_str("python -c 'print(\"test1\")'",
                                  {'store_stdout': True})
     self.lp.add_wf(Firework(script))
     launch_rocket(self.lp, self.fworker)
     stored = self.lp.get_launch_by_id(1).action.stored_data
     self.assertEqual(stored['stdout'], 'test1\n')