Example #1
0
    def test_copy(self):
        """Test that we can produce a copy of a Workflow but that the copy
        has unique fw_ids.

        """
        # Each FW's name is set to its original fw_id so provenance can be
        # checked after the copy reassigns ids.
        fws = []
        for i in range(5):
            fw = Firework([PyTask(func="print", args=[i])], fw_id=i, name=i)
            fws.append(fw)

        # DAG: 0 -> {1, 2, 3}; 1 -> 4; 2 -> 4 (3 is a leaf).
        wf = Workflow(fws, links_dict={0: [1, 2, 3], 1: [4], 2: [4]})

        wf_copy = Workflow.from_wflow(wf)

        # now we compare to the original to make sure dependencies are same.
        # have to do gymnastics because ids will NOT be the same
        # but names are retained
        for fw in wf_copy.fws:
            children = wf_copy.links.get(fw.fw_id, list())
            # name carries the pre-copy fw_id (set in the loop above)
            orig_id = fw.name

            orig_children = wf.links.get(orig_id, list())

            # compare child lists pairwise via the retained names
            for child_id, orig_child_id in zip(children, orig_children):
                self.assertEqual(orig_child_id, wf_copy.id_fw[child_id].name)
Example #2
0
    def test_append_wf(self):
        """End-to-end check of LaunchPad.append_wf.

        Covers appending to completed FWs, appending to a FW created by a
        detour, and a case where detour=True raises ValueError.
        """
        fw1 = Firework([UpdateSpecTask()])
        fw2 = Firework([ModSpecTask()])
        self.lp.add_wf(Workflow([fw1, fw2]))
        self.assertEqual(self.lp.fireworks.count(), 2)
        launch_rocket(self.lp, self.fworker)
        launch_rocket(self.lp, self.fworker)
        self.assertEqual(self.lp.launches.count(), 2)
        self.assertEqual(self.lp.fireworks.count(), 3)  # due to detour

        # Append a new FW as a child of both original FWs (ids 1 and 2).
        new_wf = Workflow([Firework([ModSpecTask()])])
        self.lp.append_wf(new_wf, [1, 2])
        launch_rocket(self.lp, self.fworker)  # launch detour
        launch_rocket(self.lp, self.fworker)  # launch new FW
        launch_rocket(self.lp, self.fworker)  # dummy launch
        # The appended FW (id 4) received spec pushes from its parents.
        new_fw = self.lp.get_fw_by_id(4)
        self.assertEqual(new_fw.spec['dummy1'], 1)
        self.assertEqual(new_fw.spec['dummy2'], [True])

        self.assertEqual(self.lp.launches.count(), 4)
        self.assertEqual(self.lp.fireworks.count(), 4)

        # Appending to the already-run FW 4 without detour still works.
        new_wf = Workflow([Firework([ModSpecTask()])])
        self.lp.append_wf(new_wf, [4])
        launch_rocket(self.lp, self.fworker)  # launch new FW
        new_fw = self.lp.get_fw_by_id(5)
        self.assertEqual(new_fw.spec['dummy2'], [True])

        # Requesting a detour here is invalid and must raise.
        new_wf = Workflow([Firework([ModSpecTask()])])
        self.assertRaises(ValueError, self.lp.append_wf, new_wf, [4], detour=True)
Example #3
0
 def test_remove_root_fws(self):
     """Removing every root FW should promote the roots' children to roots."""
     extra_fw = Firework(Task1(), parents=[self.fw2, self.fw3])
     wflow = Workflow([self.fw1, self.fw2, self.fw3, extra_fw])
     # Collect the ids of every child of a current root before removal.
     expected_roots = [child
                       for root_id in wflow.root_fw_ids
                       for child in wflow.links[root_id]]
     wflow.remove_fws(wflow.root_fw_ids)
     self.assertEqual(sorted(wflow.root_fw_ids), sorted(expected_roots))
Example #4
0
 def test_remove_leaf_fws(self):
     """Removing every leaf FW should turn the leaves' parents into leaves."""
     extra_fw = Firework(Task1(), parents=[self.fw2, self.fw3])
     wflow = Workflow([self.fw1, self.fw2, self.fw3, extra_fw])
     # Record the parents of the current leaves before removal.
     expected_leaves = [parent
                        for leaf_id in wflow.leaf_fw_ids
                        for parent in wflow.links.parent_links[leaf_id]]
     wflow.remove_fws(wflow.leaf_fw_ids)
     self.assertEqual(wflow.leaf_fw_ids, expected_leaves)
Example #5
0
 def test_remove_leaf_fws(self):
     """Removing all leaf FWs should make their parents the new leaves."""
     fw4 = Firework(Task1(), parents=[self.fw2, self.fw3])
     fws = [self.fw1, self.fw2, self.fw3, fw4]
     wflow = Workflow(fws)
     leaf_ids = wflow.leaf_fw_ids
     # collect the parents of every current leaf before removal
     parents = []
     for i in leaf_ids:
         parents.extend(wflow.links.parent_links[i])
     wflow.remove_fws(wflow.leaf_fw_ids)
     # the former parents are now the workflow's leaves
     self.assertEqual(wflow.leaf_fw_ids, parents)
Example #6
0
 def test_remove_root_fws(self):
     """Removing all root FWs should make their children the new roots."""
     fw4 = Firework(Task1(), parents=[self.fw2, self.fw3])
     fws = [self.fw1, self.fw2, self.fw3, fw4]
     wflow = Workflow(fws)
     root_ids = wflow.root_fw_ids
     # collect the children of every current root before removal
     children = []
     for i in root_ids:
         children.extend(wflow.links[i])
     wflow.remove_fws(wflow.root_fw_ids)
     # the former children are now the workflow's roots
     self.assertEqual(sorted(wflow.root_fw_ids), sorted(children))
Example #7
0
    def test_append_wf_detour(self):
        """Appending with detour=True splices the new FW between fw1 and
        fw2, so fw2's spec collects two upstream modifications."""
        fw1 = Firework([ModSpecTask()], fw_id=1)
        fw2 = Firework([ModSpecTask()], fw_id=2, parents=[fw1])
        self.lp.add_wf(Workflow([fw1, fw2]))

        # Detour: the new FW becomes a child of fw1 AND a parent of fw2.
        new_wf = Workflow([Firework([ModSpecTask()])])
        self.lp.append_wf(new_wf, [1], detour=True)

        launch_rocket(self.lp, self.fworker)
        launch_rocket(self.lp, self.fworker)

        # fw2 saw one spec push from fw1 and one from the detour FW.
        self.assertEqual(self.lp.get_fw_by_id(2).spec['dummy2'], [True, True])
Example #8
0
    def test_fwconnector(self):
        """Links may be given as scalar fw_ids, as Firework objects, or
        omitted entirely."""
        parent = Firework(ScriptTask.from_str('echo "1"'))
        child = Firework(ScriptTask.from_str('echo "1"'))
        linked = {parent.fw_id: [child.fw_id], child.fw_id: []}

        # Scalar fw_id values are normalized into single-element lists.
        by_id = Workflow([parent, child], {parent.fw_id: child.fw_id})
        self.assertEqual(by_id.links, linked)

        # Firework objects are translated to their fw_ids.
        by_obj = Workflow([parent, child], {parent: child})
        self.assertEqual(by_obj.links, linked)

        # With no links dict, every FW is isolated.
        unlinked = Workflow([parent, child])
        self.assertEqual(unlinked.links,
                         {parent.fw_id: [], child.fw_id: []})
Example #9
0
    def test_cleanupfiles_calc_dir(self):
        """DeleteFilesPrevFolder with an explicit calc_dir removes only
        entries matching the glob "to_remove*"; the literal name
        "dont_remove" does not match "dont_remove.relax0"."""
        # will test deleting from some folder specified by calc_dir
        with ScratchDir(".", copy_from_current_on_enter=True) as d:
            current_path = os.getcwd()
            os.mkdir("to_remove.relax0")
            os.mkdir("to_remove.relax1")
            os.mkdir("dont_remove.relax0")
            os.mkdir("shouldnt_touch")

            fw1 = Firework(
                [
                    DeleteFilesPrevFolder(files=["to_remove*", "dont_remove"],
                                          calc_dir=current_path),
                    PassCalcLocs(name="fw1"),
                ],
                name="fw1",
            )
            fw2 = Firework([PassCalcLocs(name="fw2")], name="fw2", parents=fw1)

            wf = Workflow([fw1, fw2])
            self.lp.add_wf(wf)
            rapidfire(self.lp)

            # only the two "to_remove*" dirs are gone
            self.assertTrue(
                os.path.exists(os.path.join(current_path,
                                            "dont_remove.relax0")))
            self.assertTrue(
                os.path.exists(os.path.join(current_path, "shouldnt_touch")))
            self.assertFalse(
                os.path.exists(os.path.join(current_path, "to_remove.relax0")))
            self.assertFalse(
                os.path.exists(os.path.join(current_path, "to_remove.relax1")))
def vasp_jobs_scan_and_run(dir, vasp_cmd, label):
    """Scan a directory tree for pending VASP runs and submit one Firework
    per run directory as a single Workflow.

    Args:
        dir: directory to scan (NOTE(review): shadows the builtin ``dir``)
        vasp_cmd: vasp run command executed by subprocess.Popen, e.g. ['mpirun','vasp_std'] or ['srun','vasp_std']
        label: a label for these jobs; used as the Workflow name

    Returns:
        The number of Fireworks added to the LaunchPad.
    """
    # NOTE(review): `init`, `fworker` and `launchpad` are module-level names
    # not visible in this chunk -- confirm they are defined at import time.
    work_dirs = init.get_directories_NeedVaspRun(dir)
    fws = []
    njobs = 0
    for work_dir in work_dirs:
        # per-directory queue settings; job_name doubles as the FW name
        queue = init.queue_setup(work_dir)
        fw_name = queue['job_name']
        ftask = VaspRun()
        fw = Firework(
            [ftask],
            spec={
                'vasp_cmd': vasp_cmd,
                '_launch_dir': work_dir,
                '_queueadapter': queue,
                '_fworker': fworker
            },
            name=fw_name)
        fws.append(fw)
        njobs = njobs + 1
    wf = Workflow(fws, name=label)
    launchpad.add_wf(wf)
    return njobs
Example #11
0
    def test_iter_and_len(self):
        """A Workflow iterates over its Fireworks and reports their count."""
        fireworks = [self.fw1, self.fw2, self.fw3]
        wflow = Workflow(fireworks)
        # Iteration order must match the construction order.
        for expected, actual in zip(fireworks, wflow):
            self.assertEqual(actual, expected)

        assert len(wflow) == len(fireworks)
Example #12
0
    def add_wf(self, wf, reassign_all=True):
        """Insert a Workflow (or a single FireWork) into the LaunchPad.

        :param wf: a Workflow object. A bare FireWork is wrapped into a
            one-FW Workflow first.
        :param reassign_all: passed through to _upsert_fws when assigning
            new fw_ids.
        :return: dict mapping old fw_ids to the newly assigned fw_ids.
        """
        m_timer.start("add_wf")
        if isinstance(wf, FireWork):
            wf = Workflow.from_FireWork(wf)

        # sets the root FWs as READY
        # prefer to wf.refresh() for speed reasons w/many root FWs
        for fw_id in wf.root_fw_ids:
            wf.id_fw[fw_id].state = 'READY'

        # insert the FireWorks and get back mapping of old to new ids
        old_new = self._upsert_fws(list(wf.id_fw.values()), reassign_all=reassign_all)

        # update the Workflow with the new ids
        wf._reassign_ids(old_new)

        # insert the WFLinks
        self.workflows.insert(wf.to_db_dict())

        m_timer.stop("add_wf")
        self.m_logger.info('Added a workflow. id_map: {}'.format(old_new))
        return old_new
Example #13
0
 def submit_new_workflow(self):
     """Claim one SUBMITTED job, mark it WAITING, and add a
     structure-prediction Workflow for it to the LaunchPad.

     Returns the claimed job's submission_id (even if workflow creation
     failed -- the job is then marked ERROR), or None when no job was
     waiting. (Python 2 source: note the print statement.)
     """
     # finds a submitted job, creates a workflow, and submits it to FireWorks
     job = self.jobs.find_and_modify({'state': 'SUBMITTED'},
                                     {'$set': {
                                         'state': 'WAITING'
                                     }})
     if job:
         submission_id = job['submission_id']
         try:
             firework = FireWork(
                 [StructurePredictionTask()],
                 spec={
                     'species': job['species'],
                     'threshold': job['threshold'],
                     'submission_id': submission_id
                 })
             wf = Workflow([firework],
                           metadata={'submission_id': submission_id})
             self.launchpad.add_wf(wf)
             print 'ADDED WORKFLOW FOR {}'.format(job['species'])
         except:
             # best-effort: flag the job as errored and keep the daemon alive
             self.jobs.find_and_modify({'submission_id': submission_id},
                                       {'$set': {
                                           'state': 'ERROR'
                                       }})
             traceback.print_exc()
         return submission_id
Example #14
0
    def test_files_in_out(self):
        """_files_out labels produced by a parent are delivered to children
        under the local names declared in their _files_in mapping."""
        # create the Workflow that passes files_in and files_out
        fw1 = Firework(
            [ScriptTask.from_str('echo "This is the first FireWork" > test1')],
            spec={"_files_out": {"fwtest1": "test1"}}, fw_id=1)
        # fw2 receives fw1's "test1" under the local name "hello"
        fw2 = Firework([ScriptTask.from_str('gzip hello')], fw_id=2,
                       parents=[fw1],
                       spec={"_files_in": {"fwtest1": "hello"},
                             "_files_out": {"fw2": "hello.gz"}})
        # fw3 receives fw2's "hello.gz" as "fwtest.2"
        # (NOTE(review): it arrives without the .gz suffix -- presumably the
        # framework gunzips on delivery; confirm in the files_in handling)
        fw3 = Firework([ScriptTask.from_str('cat fwtest.2')], fw_id=3,
                       parents=[fw2],
                       spec={"_files_in": {"fw2": "fwtest.2"}})
        wf = Workflow([fw1, fw2, fw3],
                      {fw1: [fw2], fw2: [fw3]})

        # store workflow and launch it locally
        self.lp.add_wf(wf)
        launch_rocket(self.lp, self.fworker)
        self.assertTrue(os.path.exists("test1"))
        launch_rocket(self.lp, self.fworker)
        self.assertTrue(os.path.exists("hello.gz"))
        launch_rocket(self.lp, self.fworker)
        self.assertTrue(os.path.exists("fwtest.2"))
        # clean up artifacts created in the cwd
        for f in ["test1", "hello.gz", "fwtest.2"]:
            os.remove(f)
Example #15
0
 def test_early_exit(self):
     """Run a diamond workflow (1 -> {2, 3} -> 4) with two worker
     processes and verify FWs 2 and 3 ran under different parent PIDs
     (their $PPID output differs)."""
     os.chdir(MODULE_DIR)
     script_text = "echo hello from process $PPID; sleep 2"
     fw1 = Firework(ScriptTask.from_str(shell_cmd=script_text,
                                        parameters={"stdout_file": "task.out"}),
                    fw_id=1)
     fw2 = Firework(ScriptTask.from_str(shell_cmd=script_text,
                                        parameters={"stdout_file": "task.out"}),
                    fw_id=2)
     fw3 = Firework(ScriptTask.from_str(shell_cmd=script_text,
                                        parameters={"stdout_file": "task.out"}),
                    fw_id=3)
     fw4 = Firework(ScriptTask.from_str(shell_cmd=script_text,
                                        parameters={"stdout_file": "task.out"}),
                    fw_id=4)
     wf = Workflow([fw1, fw2, fw3, fw4], {1: [2, 3], 2: [4], 3: [4]})
     self.lp.add_wf(wf)
     # two parallel processes; the sleep keeps both busy at once
     launch_multiprocess(self.lp, FWorker(), 'DEBUG', 0, 2, sleep_time=0.5)
     fw2 = self.lp.get_fw_by_id(2)
     fw3 = self.lp.get_fw_by_id(3)
     with open(os.path.join(fw2.launches[0].launch_dir, "task.out")) as f:
         fw2_text = f.read()
     with open(os.path.join(fw3.launches[0].launch_dir, "task.out")) as f:
         fw3_text = f.read()
     # different $PPID output => different worker processes
     self.assertNotEqual(fw2_text, fw3_text)
Example #16
0
    def test_copy(self):
        """Test that we can produce a copy of a Workflow but that the copy
        has unique fw_ids.

        """
        # name=i records each FW's original fw_id for later comparison
        fws = []
        for i in range(5):
            fw = Firework([PyTask(func="print", args=[i])], fw_id=i,
                          name=i)
            fws.append(fw)

        # DAG: 0 -> {1, 2, 3}; 1 -> 4; 2 -> 4
        wf = Workflow(fws, links_dict={0: [1, 2, 3], 1: [4], 2: [4]})

        wf_copy = Workflow.from_wflow(wf)

        # now we compare to the original to make sure dependencies are same.
        # have to do gymnastics because ids will NOT be the same
        # but names are retained
        for fw in wf_copy.fws:
            children = wf_copy.links.get(fw.fw_id, list())
            # the name is the pre-copy fw_id
            orig_id = fw.name

            orig_children = wf.links.get(orig_id, list())

            for child_id, orig_child_id in zip(children, orig_children):
                self.assertEqual(orig_child_id, wf_copy.id_fw[child_id].name)
Example #17
0
    def add_wf(self, wf, reassign_all=True):
        """Add a Workflow to the LaunchPad.

        :param wf: a Workflow object; a bare FireWork is auto-wrapped into
            a single-FW Workflow.
        :param reassign_all: forwarded to _upsert_fws for fw_id assignment.
        :return: dict of {old_fw_id: new_fw_id} produced by the upsert.
        """
        m_timer.start("add_wf")
        if isinstance(wf, FireWork):
            wf = Workflow.from_FireWork(wf)

        # sets the root FWs as READY
        # prefer to wf.refresh() for speed reasons w/many root FWs
        for fw_id in wf.root_fw_ids:
            wf.id_fw[fw_id].state = 'READY'

        # insert the FireWorks and get back mapping of old to new ids
        old_new = self._upsert_fws(list(wf.id_fw.values()), reassign_all=reassign_all)

        # update the Workflow with the new ids
        wf._reassign_ids(old_new)

        # insert the WFLinks
        self.workflows.insert(wf.to_db_dict())

        m_timer.stop("add_wf")
        self.m_logger.info('Added a workflow. id_map: {}'.format(old_new))
        return old_new
Example #18
0
    def test_init(self):
        """Workflow construction succeeds for valid links and rejects
        links referencing fw_ids absent from the Firework list."""
        fws = [Firework([PyTask(func="print", args=[i])], fw_id=i)
               for i in range(5)]
        wf = Workflow(fws, links_dict={0: [1, 2, 3], 1: [4], 2: [4]})
        self.assertIsInstance(wf, Workflow)
        # An unknown parent id (100) must be rejected.
        self.assertRaises(ValueError,
                          Workflow,
                          fws,
                          links_dict={0: [1, 2, 3], 1: [4], 100: [4]})
        # An unknown child id (100) must be rejected as well.
        self.assertRaises(ValueError,
                          Workflow,
                          fws,
                          links_dict={0: [1, 2, 3], 1: [4], 2: [100]})
Example #19
0
 def create(self):
     """Assemble self.work_list / self.connections into a Workflow stored
     on self.wf."""
     from fireworks.core.firework import Workflow

     self.wf = Workflow(
         self.work_list, self.connections,
         name='VaspGWFWWorkFlow', created_on=now())
     print('creating workflow')
Example #20
0
    def test_preserve_fworker(self):
        """_preserve_fworker on fw1 should result in a _fworker entry in
        the spec of its child fw2 after fw1 runs."""
        fw1 = Firework(
            [ScriptTask.from_str('echo "Testing preserve FWorker"')],
            spec={"_preserve_fworker": True},
            fw_id=1)
        fw2 = Firework(
            [ScriptTask.from_str('echo "Testing preserve FWorker pt 2"')],
            spec={"target": 1},
            parents=[fw1],
            fw_id=2)
        self.lp.add_wf(Workflow([fw1, fw2]))
        launch_rocket(self.lp, self.fworker)

        # locate fw2 via its spec marker rather than assuming its fw_id
        target_fw_id = self.lp.get_fw_ids({"spec.target": 1})[0]

        modified_spec = self.lp.get_fw_by_id(target_fw_id).spec
        # disabled retry/debug loop kept for reference (no-op string literal)
        """
        cnt = 0
        while '_fworker' not in modified_spec and cnt < 5:
            modified_spec = self.lp.get_fw_by_id(target_fw_id).spec
            print(modified_spec)
            time.sleep(5)
            cnt += 1
        """

        self.assertIsNotNone(modified_spec['_fworker'])
Example #21
0
def snl_to_wf_elastic(snl, parameters):
    """Build an elastic-constant Workflow from an SNL.

    Chain (fw_id order): 0 add-to-SNL-DB -> 1 GGA force-convergence
    optimize -> 2 VASP DB insertion -> 3 setup deformed structures.

    Args:
        snl: a StructureNL (or MPStructureNL) to run.
        parameters: optional dict; recognized keys include 'priority',
            'user_vasp_settings' and 'snlgroup_id'.

    Returns:
        Workflow named after the alphabetical formula, with run metadata.
    """
    # parameters["user_vasp_settings"] specifies user defined incar/kpoints parameters
    fws = []
    connections = defaultdict(list)
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

    # add the SNL to the SNL DB and figure out duplicate group
    tasks = [AddSNLTask()]
    spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(),
            '_queueadapter': QA_DB, '_priority': snl_priority}
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        # force the known SNL group instead of re-resolving duplicates
        spec['force_mpsnl'] = snl.as_dict()
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=0))
    connections[0] = [1]

    parameters["exact_structure"] = True
    # run GGA structure optimization for force convergence
    spec = snl_to_wf._snl_to_spec(snl, parameters=parameters)
    user_vasp_settings = parameters.get("user_vasp_settings")
    spec = update_spec_force_convergence(spec, user_vasp_settings)
    spec['run_tags'].append("origin")
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    # duplicate checking is disabled for the optimization step
    del spec['_dupefinder']
    spec['task_type'] = "Vasp force convergence optimize structure (2x)"
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB,
            'clean_task_doc':True, 'elastic_constant':"force_convergence"}
    fws.append(Firework([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    spec = {'task_type': 'Setup Deformed Struct Task', '_priority': priority,
                '_queueadapter': QA_CONTROL}
    fws.append(Firework([SetupDeformedStructTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']),fw_id=3))
    connections[2] = [3]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    # propagate the Materials Project submission id into the wf metadata
    if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections, name=Composition(
        snl.structure.composition.reduced_formula).alphabetical_formula, metadata=wf_meta)
Example #22
0
def append_wf(args):
    """CLI handler: append the workflow in args.wf_file to the given fw_ids."""
    lp = get_lp(args)
    new_wf = Workflow.from_file(args.wf_file)
    lp.append_wf(new_wf, args.fw_id,
                 detour=args.detour,
                 pull_spec_mods=args.pull_spec_mods)
Example #23
0
    def __init__(self,
                 structure,
                 pseudos,
                 kppa=1000,
                 ecut=None,
                 pawecutdg=None,
                 accuracy="normal",
                 spin_mode="polarized",
                 smearing="fermi_dirac:0.1 eV",
                 charge=0.0,
                 scf_algorithm=None,
                 autoparal=False,
                 folder=None,
                 **extra_abivars):
        """Build an atomic-relaxation workflow on ``self.wf``.

        Args:
            structure: input structure to relax.
            pseudos: pseudopotentials for the input factory.
            kppa: k-point density per reciprocal atom.
            ecut: plane-wave cutoff (None lets the factory decide).
            pawecutdg: PAW double-grid cutoff.
            accuracy, spin_mode, smearing, charge, scf_algorithm: forwarded
                verbatim to ``ion_ioncell_relax_input``.
            autoparal: if True, prepend an autoparal Firework feeding the
                relaxation Firework.
            folder: optional base directory; when set, the relaxation runs
                in ``<folder>/atomic_relax``.
            **extra_abivars: extra ABINIT variables set on the input.
        """
        abiinput = ion_ioncell_relax_input(structure=structure,
                                           pseudos=pseudos,
                                           kppa=kppa,
                                           ecut=ecut,
                                           pawecutdg=pawecutdg,
                                           accuracy=accuracy,
                                           spin_mode=spin_mode,
                                           smearing=smearing,
                                           charge=charge,
                                           scf_algorithm=scf_algorithm)

        abiinput.set_vars(**extra_abivars)

        # only the first dataset of the relax input is executed here
        abitask = RelaxTask(StrategyWithInput(abiinput.split_datasets()[0]))

        self.task = RelaxFWTask(abitask)

        spec = {}
        if folder:
            spec['_launch_dir'] = os.path.join(folder, 'atomic_relax')
        fw = Firework(self.task, spec=spec)

        if autoparal:
            # Bug fix: spec['_launch_dir'] was previously read
            # unconditionally, raising KeyError whenever autoparal=True but
            # no folder was given; .get() makes the launch dir optional.
            autoparal_fw = self.create_autoparal_fw(self.task,
                                                    spec.get('_launch_dir'))
            self.wf = Workflow([autoparal_fw, fw], {autoparal_fw: [fw]})
        else:
            self.wf = Workflow([fw])
Example #24
0
    def __init__(self, structure, pseudos):
        """Create a one-Firework workflow that runs the SCF step of an
        electron-bands input."""
        # only the first dataset of the ebands input is used here
        scf_input = ebands_input(structure, pseudos).split_datasets()[0]
        self.scf_fw = Firework(AbiFireTask(scf_input))
        self.wf = Workflow([self.scf_fw])
Example #25
0
    def test_parentconnector(self):
        """Links can be derived purely from the Fireworks' ``parents``."""
        root = Firework(ScriptTask.from_str('echo "1"'))
        mid = Firework(ScriptTask.from_str('echo "1"'), parents=root)
        leaf = Firework(ScriptTask.from_str('echo "1"'), parents=[root, mid])

        expected = {root.fw_id: [mid.fw_id, leaf.fw_id],
                    mid.fw_id: [leaf.fw_id],
                    leaf.fw_id: []}
        self.assertEqual(Workflow([root, mid, leaf]).links, expected)
        # omitting a declared parent (mid) from the FW list is an error
        self.assertRaises(ValueError, Workflow, [root, leaf])  # can't make this
Example #26
0
    def __init__(self,
                 structure,
                 pseudos,
                 ksampling=1000,
                 relax_algo="atoms_only",
                 accuracy="normal",
                 spin_mode="polarized",
                 smearing="fermi_dirac:0.1 eV",
                 charge=0.0,
                 scf_algorithm=None,
                 autoparal=False,
                 max_restart=10,
                 folder=None,
                 **extra_abivars):
        """Build a multi-step relaxation workflow on ``self.wf``.

        Args:
            structure: structure to relax.
            pseudos: pseudopotentials for the task.
            ksampling: k-point sampling density.
            relax_algo: relaxation algorithm (default relaxes atoms only).
            accuracy, spin_mode, smearing, charge, scf_algorithm: forwarded
                verbatim to ``MultiStepRelaxStrategyFireTask``.
            autoparal: if True, prepend an autoparal Firework feeding the
                relaxation Firework.
            max_restart: maximum number of additional restart steps.
            folder: optional base directory; when set, the run uses
                ``<folder>/relax`` as its launch dir.
            **extra_abivars: extra ABINIT variables for the task.
        """

        task = MultiStepRelaxStrategyFireTask(structure=structure,
                                              pseudos=pseudos,
                                              ksampling=ksampling,
                                              relax_algo=relax_algo,
                                              accuracy=accuracy,
                                              spin_mode=spin_mode,
                                              smearing=smearing,
                                              charge=charge,
                                              scf_algorithm=scf_algorithm,
                                              deps={},
                                              additional_steps=max_restart,
                                              **extra_abivars)

        spec = {}
        if folder:
            spec['_launch_dir'] = os.path.join(folder, 'relax')

        fw = Firework(task, spec=spec)

        # Create the workflow
        if autoparal:
            # Bug fix: spec['_launch_dir'] was previously read
            # unconditionally, raising KeyError whenever autoparal=True but
            # no folder was given; .get() makes the launch dir optional.
            autoparal_fw = self.create_autoparal_fw(task,
                                                    spec.get('_launch_dir'))
            self.wf = Workflow([autoparal_fw, fw], {autoparal_fw: [fw]})
        else:
            self.wf = Workflow([fw])
Example #27
0
 def run_task(self, fw_spec):
     """For each deformation of the relaxed structure, build a 3-FW
     sub-workflow (SNL insert -> optimize deformed struct -> DB insert +
     elastic data) and return all of them as workflow additions.

     fw_spec must provide 'output'['crystal'], 'task_id' and '_priority';
     'priority' and 'snlgroup_id' are optional.
     """
     # Read structure from previous relaxation
     relaxed_struct = fw_spec['output']['crystal']
     # Generate deformed structures
     d_struct_set = DeformedStructureSet(relaxed_struct, ns=0.06)
     wf=[]
     for i, d_struct in enumerate(d_struct_set.def_structs):
         fws=[]
         connections={}
         f = Composition(d_struct.formula).alphabetical_formula
         snl = StructureNL(d_struct, 'Joseph Montoya <*****@*****.**>', 
                           projects=["Elasticity"])
         tasks = [AddSNLTask()]
         snl_priority = fw_spec.get('priority', 1)
         spec = {'task_type': 'Add Deformed Struct to SNL database', 
                 'snl': snl.as_dict(), 
                 '_queueadapter': QA_DB, 
                 '_priority': snl_priority}
         if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL):
             spec['force_mpsnl'] = snl.as_dict()
             spec['force_snlgroup_id'] = fw_spec['snlgroup_id']
             del spec['snl']
         # fw_ids are negative with a stride of 10: deformation i uses the
         # triple (-1000+10i, -999+10i, -998+10i)
         fws.append(Firework(tasks, spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-1000+i*10))
         connections[-1000+i*10] = [-999+i*10]
         spec = snl_to_wf._snl_to_spec(snl, 
                                       parameters={'exact_structure':True})
         spec = update_spec_force_convergence(spec)
         spec['deformation_matrix'] = d_struct_set.deformations[i].tolist()
         spec['original_task_id'] = fw_spec["task_id"]
         spec['_priority'] = fw_spec['_priority']*2
         #Turn off dupefinder for deformed structure
         del spec['_dupefinder']
         spec['task_type'] = "Optimize deformed structure"
         fws.append(Firework([VaspWriterTask(), SetupElastConstTask(),
                              get_custodian_task(spec)], 
                             spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-999+i*10))

         priority = fw_spec['_priority']*3
         spec = {'task_type': 'VASP db insertion', 
                 '_priority': priority,
                 '_allow_fizzled_parents': True, 
                 '_queueadapter': QA_DB, 
                 'elastic_constant':"deformed_structure", 
                 'clean_task_doc':True,
                 'deformation_matrix':d_struct_set.deformations[i].tolist(), 
                 'original_task_id':fw_spec["task_id"]}
         fws.append(Firework([VaspToDBTask(), AddElasticDataToDBTask()], spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-998+i*10))
         connections[-999+i*10] = [-998+i*10]
         wf.append(Workflow(fws, connections))
     # hand every per-deformation Workflow back to FireWorks as additions
     return FWAction(additions=wf)
Example #28
0
    def test_getinterpolatedposcar(self):
        """GetInterpolatedPOSCAR should write the POSCAR for image
        `this_image` of a start->end interpolation, plus the endpoint
        CONTCARs under ./interpolate, matching Structure.interpolate."""
        nimages = 5
        this_image = 1
        autosort_tol = 0.5

        # fw1/fw2 stage the start and end CONTCARs and record calc locs
        fw1 = Firework([CopyVaspOutputs(calc_dir=self.static_outdir,
                                        contcar_to_poscar=False,
                                        additional_files=["CONTCAR"]),
                        PassCalcLocs(name="fw1")], name="fw1")

        fw2 = Firework([CopyVaspOutputs(calc_dir=self.opt_outdir,
                                        contcar_to_poscar=False,
                                        additional_files=["CONTCAR"]),
                        PassCalcLocs(name="fw2")], name="fw2")

        fw3 = Firework([GetInterpolatedPOSCAR(start="fw1",
                                              end="fw2",
                                              this_image=this_image,
                                              nimages=nimages,
                                              autosort_tol=autosort_tol),
                        PassCalcLocs(name="fw3")],
                       name="fw3", parents=[fw1, fw2])
        fw4 = Firework([PassCalcLocs(name="fw4")], name="fw4", parents=fw3)

        wf = Workflow([fw1, fw2, fw3, fw4])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        fw4 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw4"})[0])

        # fw4's spec carries the calc_locs accumulated upstream
        calc_locs = fw4.spec["calc_locs"]
        self.assertTrue(os.path.exists(get_calc_loc("fw3", calc_locs)["path"] +
                                       "/POSCAR"))
        self.assertTrue(os.path.exists(get_calc_loc("fw3", calc_locs)["path"] +
                                       "/interpolate/CONTCAR_0"))
        self.assertTrue(os.path.exists(get_calc_loc("fw3", calc_locs)["path"] +
                                       "/interpolate/CONTCAR_1"))

        struct_start = Structure.from_file(get_calc_loc("fw3", calc_locs)["path"] +
                                          "/interpolate/CONTCAR_0")
        struct_end = Structure.from_file(get_calc_loc("fw3", calc_locs)["path"] +
                                         "/interpolate/CONTCAR_1")
        struct_inter = Structure.from_file(get_calc_loc("fw3", calc_locs)["path"] +
                                           "/POSCAR")

        # reference interpolation computed directly with pymatgen
        structs = struct_start.interpolate(struct_end,
                                           nimages,
                                           interpolate_lattices=True,
                                           autosort_tol=autosort_tol)

        # Check x of 1st site.
        self.assertAlmostEqual(structs[this_image][1].coords[0],
                               struct_inter[1].coords[0])
        # Check the first lattice parameter (a).
        # (original comment said "c", but the code reads abc[0])
        self.assertAlmostEqual(structs[this_image].lattice.abc[0],
                               struct_inter.lattice.abc[0])
Example #29
0
    def test_offline_fw_passinfo(self):
        """Offline-mode run of a two-parent / one-child AdditionTask
        workflow: reserve each FW, run it without DB access, recover the
        launches, and verify the info passed from parents to the child."""
        fw1 = Firework([AdditionTask()], {"input_array": [1, 1]}, name="1")
        fw2 = Firework([AdditionTask()], {"input_array": [2, 2]}, name="2")
        fw3 = Firework([AdditionTask()], {"input_array": [3]},
                       parents=[fw1, fw2],
                       name="3")

        wf = Workflow([fw1, fw2, fw3])
        self.lp.add_wf(wf)

        # make dirs for launching jobs
        cur_dir = os.path.dirname(os.path.abspath(__file__))

        os.mkdir(os.path.join(cur_dir, "launcher_1"))
        os.mkdir(os.path.join(cur_dir, "launcher_2"))
        os.mkdir(os.path.join(cur_dir, "launcher_3"))

        # launch two parent jobs
        os.chdir(os.path.join(cur_dir, "launcher_1"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        setup_offline_job(self.lp, fw, launch_id)
        # lp=None simulates a worker with no DB access (offline mode)
        launch_rocket(None, self.fworker)

        os.chdir(os.path.join(cur_dir, "launcher_2"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        setup_offline_job(self.lp, fw, launch_id)
        launch_rocket(None, self.fworker)

        # recover jobs
        for l in self.lp.offline_runs.find(
            {
                "completed": False,
                "deprecated": False
            }, {"launch_id": 1}):
            fw = self.lp.recover_offline(l['launch_id'])

        # launch child job
        os.chdir(os.path.join(cur_dir, "launcher_3"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        last_fw_id = fw.fw_id
        setup_offline_job(self.lp, fw, launch_id)
        launch_rocket(None, self.fworker)

        # recover jobs
        for l in self.lp.offline_runs.find(
            {
                "completed": False,
                "deprecated": False
            }, {"launch_id": 1}):
            fw = self.lp.recover_offline(l['launch_id'])

        # confirm the sum in the child job
        # parents contribute 1+1=2 and 2+2=4; the child keeps its own 3
        child_fw = self.lp.get_fw_by_id(last_fw_id)
        self.assertEqual(set(child_fw.spec['input_array']), set([2, 3, 4]))
        self.assertEqual(child_fw.launches[0].action.stored_data["sum"], 9)
Example #30
0
def add_wf(args):
    """CLI handler: add workflow file(s) to the LaunchPad.

    With args.dir set, each entry of args.wf_file is a directory whose
    contents are loaded; otherwise the entries are workflow files.
    """
    lp = get_lp(args)
    if args.dir:
        files = [os.path.join(folder, entry)
                 for folder in args.wf_file
                 for entry in os.listdir(folder)]
    else:
        files = args.wf_file
    for path in files:
        lp.add_wf(Workflow.from_file(path))
Example #31
0
    def get_wf_by_fw_id(self, fw_id):
        """
        Given a FireWork id, give back the Workflow containing that FireWork
        :param fw_id:
        :return: A Workflow object
        """
        # Look up the workflow document whose node list contains this fw_id,
        # then hydrate every FW it references.
        wf_doc = self.workflows.find_one({'nodes': fw_id})
        fireworks = [self.get_fw_by_id(node) for node in wf_doc["nodes"]]
        return Workflow(fireworks, wf_doc['links'], wf_doc['name'],
                        wf_doc['metadata'])
Example #32
0
 def test_multi_detour(self):
     """A task returning several detours wires every detour FW before fw2."""
     fw1 = Firework([MultipleDetourTask()], fw_id=1)
     fw2 = Firework([ScriptTask.from_str('echo "DONE"')], parents=[fw1], fw_id=2)
     self.lp.add_wf(Workflow([fw1, fw2]))
     rapidfire(self.lp)
     links = self.lp.get_wf_by_fw_id(1).links
     # fw1 gained detour children 3, 4, 5 besides its original child 2,
     # and each detour points at fw2.
     self.assertEqual(set(links[1]), {2, 3, 4, 5})
     self.assertEqual(set(links[2]), set())
     for detour_id in (3, 4, 5):
         self.assertEqual(set(links[detour_id]), {2})
Example #33
0
def add_scripts(args):
    """Build a linear workflow of ScriptTask fireworks from the CLI scripts and add it."""
    lp = get_lp(args)
    # Default missing names/workflow name the same way the CLI always has.
    if not args.names:
        args.names = [None] * len(args.scripts)
    if not args.wf_name:
        args.wf_name = args.names[0]

    fws = []
    links = {}
    for idx, s in enumerate(args.scripts):
        task = ScriptTask({'script': s, 'use_shell': True})
        fws.append(Firework(task, name=args.names[idx], fw_id=idx))
        if idx != 0:
            # Chain each firework after the one before it.
            links[idx - 1] = idx

    lp.add_wf(Workflow(fws, links, args.wf_name))
Example #34
0
    def test_spec_copy(self):
        """Two fireworks built from one shared spec dict must keep their own tasks."""
        shared_spec = {'_category': 'dummy_category'}

        fw1 = Firework(ScriptTask.from_str('echo "Task 1"'), fw_id=1, name='Task 1', spec=shared_spec)
        fw2 = Firework(ScriptTask.from_str('echo "Task 2"'), fw_id=2, name='Task 2', spec=shared_spec)

        self.lp.add_wf(Workflow([fw1, fw2]))

        # Each stored firework must retain its own script despite the shared spec.
        for fw_id, expected in ((1, 'echo "Task 1"'), (2, 'echo "Task 2"')):
            stored = self.lp.get_fw_by_id(fw_id)
            self.assertEqual(stored.tasks[0]['script'][0], expected)
Example #35
0
def task_dict_to_wf(task_dict, launchpad):
    """Convert a legacy task document into a one-FireWork Workflow and add it.

    NOTE: Python 2 code (uses the ``print`` statement).

    Builds a COMPLETED Firework backed by a synthetic COMPLETED Launch from
    the fields of ``task_dict``, wraps it in a Workflow and inserts both into
    ``launchpad``.

    :param task_dict: dict with legacy task fields ('task_type', 'run_tags',
        'snl', 'snlgroup_id', 'dir_name', 'dir_name_full', 'completed_at',
        'pretty_formula', ...)
    :param launchpad: LaunchPad used to mint ids and store the workflow/launch
    :return: (int) the fw_id assigned to the new FireWork
    """
    # Reserve fresh ids up front so the Launch and Firework can reference each other.
    fw_id = launchpad.get_new_fw_id()
    l_id = launchpad.get_new_launch_id()

    spec = {'task_type': task_dict['task_type'], 'run_tags': task_dict['run_tags'],
            'vaspinputset_name': None, 'vasp': None, 'mpsnl': task_dict['snl'],
            'snlgroup_id': task_dict['snlgroup_id']}
    # Placeholder task: the real work was done by the legacy system.
    tasks = [DummyLegacyTask()]

    launch_dir = task_dict['dir_name_full']

    stored_data = {'error_list': []}
    # Propagate the legacy run's context to any downstream fireworks.
    update_spec = {'prev_vasp_dir': task_dict['dir_name'],
                   'prev_task_type': spec['task_type'],
                   'mpsnl': spec['mpsnl'], 'snlgroup_id': spec['snlgroup_id'],
                   'run_tags': spec['run_tags']}

    fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)

    # Record a completion timestamp when the legacy document provides one.
    if task_dict['completed_at']:
        complete_date = datetime.datetime.strptime(task_dict['completed_at'], "%Y-%m-%d %H:%M:%S")
        state_history = [{"created_on": complete_date, 'state': 'COMPLETED'}]
    else:
        state_history = []

    launches = [Launch('COMPLETED', launch_dir, fworker=None, host=None, ip=None, action=fwaction,
                       state_history=state_history, launch_id=l_id, fw_id=fw_id)]

    f = Composition(task_dict['pretty_formula']).alphabetical_formula


    fw = Firework(tasks, spec, name=get_slug(f + '--' + spec['task_type']), launches=launches, state='COMPLETED', created_on=None,
                 fw_id=fw_id)

    wf_meta = get_meta_from_structure(Structure.from_dict(task_dict['snl']))
    wf_meta['run_version'] = 'preproduction (0)'

    wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)

    # reassign_all=False keeps the ids minted above instead of renumbering.
    launchpad.add_wf(wf, reassign_all=False)
    launchpad._upsert_launch(launches[0])

    print 'ADDED', fw_id
    return fw_id
Example #36
0
def add_wf_dir(args):
    """Add every workflow file found in ``args.wf_dir`` to the LaunchPad.

    :param args: parsed CLI arguments; ``args.wf_dir`` is the directory to scan
    """
    lp = get_lp(args)
    for filename in os.listdir(args.wf_dir):
        # os.listdir returns bare names; join with the directory so loading
        # works regardless of the current working directory.
        fwf = Workflow.from_file(os.path.join(args.wf_dir, filename))
        lp.add_wf(fwf)
Example #37
0
def task_dict_to_wf(task_dict, launchpad):
    """Convert a legacy task document into a one-FireWork Workflow and add it.

    Builds a COMPLETED Firework backed by a synthetic COMPLETED Launch from
    the fields of ``task_dict``, wraps it in a Workflow and inserts both into
    ``launchpad``.

    :param task_dict: dict with legacy task fields ("task_type", "run_tags",
        "snl", "snlgroup_id", "dir_name", "dir_name_full", "completed_at",
        "pretty_formula", ...)
    :param launchpad: LaunchPad used to mint ids and store the workflow/launch
    :return: (int) the fw_id assigned to the new FireWork
    """
    # Reserve fresh ids up front so the Launch and Firework can reference each other.
    fw_id = launchpad.get_new_fw_id()
    l_id = launchpad.get_new_launch_id()

    spec = {
        "task_type": task_dict["task_type"],
        "run_tags": task_dict["run_tags"],
        "vaspinputset_name": None,
        "vasp": None,
        "mpsnl": task_dict["snl"],
        "snlgroup_id": task_dict["snlgroup_id"],
    }
    # Placeholder task: the real work was done by the legacy system.
    tasks = [DummyLegacyTask()]

    launch_dir = task_dict["dir_name_full"]

    stored_data = {"error_list": []}
    # Propagate the legacy run's context to any downstream fireworks.
    update_spec = {
        "prev_vasp_dir": task_dict["dir_name"],
        "prev_task_type": spec["task_type"],
        "mpsnl": spec["mpsnl"],
        "snlgroup_id": spec["snlgroup_id"],
        "run_tags": spec["run_tags"],
    }

    fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)

    # Record a completion timestamp when the legacy document provides one.
    if task_dict["completed_at"]:
        complete_date = datetime.datetime.strptime(task_dict["completed_at"], "%Y-%m-%d %H:%M:%S")
        state_history = [{"created_on": complete_date, "state": "COMPLETED"}]
    else:
        state_history = []

    launches = [
        Launch(
            "COMPLETED",
            launch_dir,
            fworker=None,
            host=None,
            ip=None,
            action=fwaction,
            state_history=state_history,
            launch_id=l_id,
            fw_id=fw_id,
        )
    ]

    f = Composition(task_dict["pretty_formula"]).alphabetical_formula

    fw = Firework(
        tasks,
        spec,
        name=get_slug(f + "--" + spec["task_type"]),
        launches=launches,
        state="COMPLETED",
        created_on=None,
        fw_id=fw_id,
    )

    wf_meta = get_meta_from_structure(Structure.from_dict(task_dict["snl"]))
    wf_meta["run_version"] = "preproduction (0)"

    wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)

    # reassign_all=False keeps the ids minted above instead of renumbering.
    launchpad.add_wf(wf, reassign_all=False)
    launchpad._upsert_launch(launches[0])

    # Fixed: the original `print "ADDED", fw_id` is a Python 2 print statement,
    # a SyntaxError under Python 3 and inconsistent with this block's formatting.
    print("ADDED", fw_id)
    return fw_id