Example #1
File: Strategy.py Project: sigveka/MoDeNa
    def workflow(self, model):
        """
        @brief    Create a FireWorks Workflow object performing initialisation.
        @details
                  The workflow

        @param model surrogate model object.

        @return Workflow object
        """
        ## Call the newPoints method to receive a list of dictionaries each
        #  dictionary representing one data point.
        p = self.newPoints(model)
        if len(p):
            wf = model.exactTasks(p)
            wf.append_wf(model.parameterFittingStrategy().workflow(model),
                         wf.leaf_fw_ids)
            return wf

        elif not len(p) and len(model.substituteModels):
            wf = Workflow([])
            for sm in model.substituteModels:
                wf.append_wf(sm.initialisationStrategy().workflow(sm), [])
            return wf

        else:
            return Workflow([])
Example #2
    def run_task(self, fw_spec):
        objective_with_inc = fw_spec["%s_eval_metrics_with_inc" % EVAL_SCRIPT][OBJECTIVE_METRIC]
        objective_with_dec = fw_spec["%s_eval_metrics_with_dec" % EVAL_SCRIPT][OBJECTIVE_METRIC]
        orig_objective = fw_spec["%s_eval_metrics" % EVAL_SCRIPT][OBJECTIVE_METRIC]

        if orig_objective >= objective_with_inc and orig_objective >= objective_with_dec:
            fw_spec['coord_ascent_params'] = fw_spec['orig_param_val']  # keep the original parameter values
            mod_alpha(fw_spec, 'dec')
            best_obj = fw_spec["%s_eval_metrics" % EVAL_SCRIPT][OBJECTIVE_METRIC]
            change_for_best_obj = 'const'
        elif objective_with_dec >= objective_with_inc and objective_with_dec > orig_objective:
            fw_spec['coord_ascent_params'] = fw_spec['dec_param_val']  # update parameter
            mod_alpha(fw_spec, 'inc')
            # update baseline metrics for the next iteration
            fw_spec["%s_eval_metrics" % EVAL_SCRIPT] = fw_spec["%s_eval_metrics_with_dec" % EVAL_SCRIPT]
            best_obj = fw_spec["%s_eval_metrics_with_dec" % EVAL_SCRIPT][OBJECTIVE_METRIC]
            change_for_best_obj = 'dec'
        elif objective_with_inc > objective_with_dec and objective_with_inc > orig_objective:
            fw_spec['coord_ascent_params'] = fw_spec['inc_param_val']  # update parameter
            mod_alpha(fw_spec, 'inc')
            # update baseline metrics for the next iteration
            fw_spec["%s_eval_metrics" % EVAL_SCRIPT] = fw_spec["%s_eval_metrics_with_inc" % EVAL_SCRIPT]
            best_obj = fw_spec["%s_eval_metrics_with_inc" % EVAL_SCRIPT][OBJECTIVE_METRIC]
            change_for_best_obj = 'inc'
        else:
            print("Coding error in ChooseNextIter()")
            print(objective_with_inc, objective_with_dec, orig_objective)
            sys.exit(1)

        fw_spec['param_idx'] = inc_parameter_idx(fw_spec['param_idx'], fw_spec)

        if fw_spec['param_idx'] == 0:
            val_spec = copy.deepcopy(fw_spec)
            val_spec['TRAINING_SEQUENCES'] = VALIDATION_SEQUENCES    
            val_spec['seq_idx_to_eval'] = VALIDATION_SEQUENCES       
            val_spec['validation_eval'] = True   
            val_batch = Firework(RunRBPF_Batch(), spec = val_spec)    
            val_eval = Firework(RunEval(), spec = val_spec)
            storeResultsFW = Firework(StoreResultsInDatabase(), spec=val_spec)

            next_iter_firework = Firework(Iterate(), fw_spec)

            workflow = Workflow([val_batch, val_eval, storeResultsFW, next_iter_firework], 
                                {val_batch: [val_eval], val_eval: [storeResultsFW]})

        else:
            next_iter_firework = Firework(Iterate(), fw_spec)

            workflow = Workflow([next_iter_firework])

        return FWAction(stored_data = {'best_obj': best_obj,
                                       'change_for_best_obj': change_for_best_obj,
                                       'parameter_changed_val': fw_spec['coord_ascent_params']},
                        additions = workflow)
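The task above picks the best of the three candidate evaluations and then emits the next coordinate-ascent iteration as new Fireworks via FWAction. A minimal, self-contained sketch of that dynamic-workflow pattern (with a hypothetical DemoIterate task, not the code above) might look like this:

from fireworks import FiretaskBase, FWAction, Firework, Workflow, explicit_serialize


@explicit_serialize
class DemoIterate(FiretaskBase):
    """Hypothetical task: keeps re-submitting itself until 'counter' reaches 3."""

    def run_task(self, fw_spec):
        counter = fw_spec.get('counter', 0)
        if counter >= 3:
            return FWAction(stored_data={'final_counter': counter})
        # dynamically append the next iteration as a new Workflow
        next_fw = Firework(DemoIterate(), spec={'counter': counter + 1})
        return FWAction(stored_data={'counter': counter},
                        additions=Workflow([next_fw]))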
Example #3
    def test_set_queue_adapter(self):
        # test fw_name_constraint
        fw1 = Firework([ScriptTask(script=None)], fw_id=-1, name="Firsttask")
        fw2 = Firework([ScriptTask(script=None)],
                       parents=[fw1],
                       fw_id=-2,
                       name="Secondtask")
        fw3 = Firework([ScriptTask(script=None)],
                       parents=[fw1],
                       fw_id=-3,
                       name="Thirdtask")

        wf = Workflow([fw1, fw2, fw3])
        wf = set_queue_adapter(wf, {"test": {
            "test": 1
        }},
                               fw_name_constraint="Secondtask")
        self.assertDictEqual(wf.id_fw[-1].spec, {})
        self.assertDictEqual(wf.id_fw[-2].spec,
                             {"_queueadapter": {
                                 "test": {
                                     "test": 1
                                 }
                             }})
        self.assertDictEqual(wf.id_fw[-3].spec, {})

        # test task_name_constraint
        fw1 = Firework([ScriptTask(script=None)], fw_id=-1, name="Firsttask")
        fw2 = Firework(
            [ScriptTask(script=None),
             ModifiedScriptTask(script=None)],
            parents=[fw1],
            fw_id=-2,
            name="Secondtask",
        )
        fw3 = Firework([ScriptTask(script=None)],
                       parents=[fw1],
                       fw_id=-3,
                       name="Thirdtask")

        wf = Workflow([fw1, fw2, fw3])
        wf = set_queue_adapter(wf, {"test": {
            "test": 1
        }},
                               task_name_constraint="ModifiedScriptTask")
        self.assertDictEqual(wf.id_fw[-1].spec, {})
        self.assertDictEqual(wf.id_fw[-2].spec,
                             {"_queueadapter": {
                                 "test": {
                                     "test": 1
                                 }
                             }})
        self.assertDictEqual(wf.id_fw[-3].spec, {})
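Outside of tests, the same powerup is typically applied right before a workflow is submitted. A hedged usage sketch, assuming the same set_queue_adapter under test here (e.g. from atomate.common.powerups):

from fireworks import Firework, ScriptTask, Workflow

fw_a = Firework([ScriptTask.from_str('echo "prepare"')], name="Firsttask")
fw_b = Firework([ScriptTask.from_str('echo "run"')], parents=[fw_a], name="Secondtask")
wf = Workflow([fw_a, fw_b])

# Only Fireworks whose name matches the constraint get a "_queueadapter" entry
# in their spec; "walltime" is just an illustrative queue-adapter field.
wf = set_queue_adapter(wf, {"walltime": "24:00:00"}, fw_name_constraint="Secondtask")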
Example #4
    def test_parse_pass_write(self):

        input_file = "test.qin.opt_1"
        output_file = "test.qout.opt_1"
        calc_dir = os.path.join(module_dir, "..", "..", "test_files",
                                "FF_working")

        p_task = QChemToDb(calc_dir=calc_dir,
                           input_file=input_file,
                           output_file=output_file,
                           db_file=">>db_file<<")
        fw1 = Firework([p_task])
        w_task = WriteInputFromIOSet(qchem_input_set="OptSet",
                                     write_to_dir=module_dir)
        fw2 = Firework([w_task], parents=fw1)
        wf = Workflow([fw1, fw2])

        self.lp.add_wf(wf)
        rapidfire(
            self.lp,
            fworker=FWorker(env={"db_file": os.path.join(db_dir, "db.json")}))

        test_mol = QCInput.from_file(os.path.join(module_dir,
                                                  "mol.qin")).molecule
        np.testing.assert_equal(self.act_mol.species, test_mol.species)
        np.testing.assert_equal(self.act_mol.cart_coords, test_mol.cart_coords)
Example #5
    def execute(self, name):
        try:
            self.state.saveState('READY')
            lp = LaunchPad(**self.db)
            lp.reset('', require_password=False)
            tasks = []
            for idx, command in enumerate(self.commands):
                if idx > 0:
                    tasks.append(
                        Firework(ScriptTask.from_str(command),
                                 name=f'task_{idx}',
                                 fw_id=idx,
                                 parents=[tasks[idx - 1]]))
                else:
                    tasks.append(
                        Firework(ScriptTask.from_str(command),
                                 name=f'task_{idx}',
                                 fw_id=idx))

            self.state.saveState('RUNNING')
            wf = Workflow(tasks, name=name)
            lp.add_wf(wf)
            rapidfire(lp)
            self.state.saveState('FINISHED')
        except Exception as e:
            print(e)
            self.state.saveState('ERROR')
Example #6
def get_relax_static_wf(structures,
                        vasp_cmd=">>vasp_cmd<<",
                        db_file=">>db_file<<",
                        name="regular_relax",
                        **kwargs):
    """
    :param structures:
    :param vasp_cmd:
    :param db_file:
    :param name:
    :param kwargs:
    :return:
    """
    wfs = []
    for s in structures:
        fw1 = OptimizeFW(s,
                         vasp_cmd=vasp_cmd,
                         db_file=db_file,
                         parents=[],
                         **kwargs)
        fw2 = StaticFW(s, vasp_cmd=vasp_cmd, db_file=db_file, parents=[fw1])
        wfs.append(
            Workflow([fw1, fw2],
                     name=name + str(s.composition.reduced_formula)))
    return wfs
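Since the function returns a list of workflows (one per structure), a caller typically adds each one to a LaunchPad. A minimal sketch, assuming hypothetical POSCAR input files and a LaunchPad configured for auto_load:

from fireworks import LaunchPad
from pymatgen.core import Structure

structures = [Structure.from_file("POSCAR_Al"), Structure.from_file("POSCAR_Cu")]  # hypothetical inputs
lp = LaunchPad.auto_load()
for wf in get_relax_static_wf(structures, name="demo_relax_"):
    lp.add_wf(wf)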
Example #7
def get_wf_structure_sampler(xdatcar_file,
                             n=10,
                             steps_skip_first=1000,
                             vasp_cmd=">>vasp_cmd<<",
                             db_file=">>db_file<<",
                             name="structure_sampler",
                             **kwargs):
    """
    :param xdatcar_file:
    :param n:
    :param steps_skip_first:
    :param vasp_cmd:
    :param db_file:
    :param name:
    :param kwargs:
    :return:
    """
    structures = get_sample_structures(xdatcar_path=xdatcar_file,
                                       n=n,
                                       steps_skip_first=steps_skip_first)
    wfs = []
    for s in structures:
        fw1 = OptimizeFW(s,
                         vasp_cmd=vasp_cmd,
                         db_file=db_file,
                         parents=[],
                         **kwargs)
        fw2 = StaticFW(s, vasp_cmd=vasp_cmd, db_file=db_file, parents=[fw1])
        wfs.append(
            Workflow([fw1, fw2],
                     name=name + str(s.composition.reduced_formula)))
    return wfs
Example #8
    def to_fireworks(self, method='from dict'):
        """ Returns a fireworks workflow object """
        from fireworks import Firework, Workflow

        if method == 'from dict':
            fws = []
            for step in self.get_steps():
                spec = step.copy()
                for key in ['id', 'data', 'name', 'inputs', 'outputs']:
                    del spec[key]
                spec = translate_keys(spec, 'tasks', '_tasks')
                spec = translate_keys(spec, 'name', '_fw_name')
                fws.append({
                    'name': step['name'],
                    'fw_id': step['id'],
                    'spec': spec
                })
            dct = {
                'fws': fws,
                'links': self.get_ctrlflow_links_dict(),
                'name': self['name'],
                'metadata': {}
            }
            return Workflow.from_dict(dct)
        if method == 'from object':
            return Workflow(
                fireworks=[Firework(step) for step in self.get_steps()],
                links_dict=self.get_ctrlflow_links_dict(),
                name=self['name']
            )
Example #9
    def test_use_fake_qchem(self):

        input_file = "test.qin.opt_1"
        output_file = "test.qout.opt_1"
        calc_dir = os.path.join(module_dir, "..", "test_files", "FF_working")

        run_task = RunQChemDirect(
            qchem_cmd="should not need this going to be replaced with fake run"
        )
        p_task = QChemToDb(calc_dir=calc_dir,
                           input_file=input_file,
                           output_file=output_file)
        fw1 = Firework([run_task, p_task], name="test_fake_run")
        w_task = WriteInputFromIOSet(qchem_input_set="OptSet",
                                     write_to_dir=module_dir)
        fw2 = Firework([w_task], parents=fw1, name="test_write")
        wf = Workflow([fw1, fw2])
        ref_dirs = {"test_fake_run": os.path.join(calc_dir, output_file)}

        fake_run_wf = use_fake_qchem(wf, ref_dirs)
        test_fake_run = False
        for fw in fake_run_wf.fws:
            if fw.name == "test_fake_run":
                for t in fw.tasks:
                    if "RunQChemFake" in str(t):
                        test_fake_run = True
        self.assertTrue(test_fake_run)
Example #10
    def test_get_lp_and_fw_id_from_task(self):
        """
        Tests the get_lp_and_fw_id_from_task. This test relies on the fact that the LaunchPad loaded from auto_load
        will be different from what is defined in TESTDB_NAME. If this is not the case the test will be skipped.
        """
        lp = LaunchPad.auto_load()

        if not lp or lp.db.name == TESTDB_NAME:
            raise unittest.SkipTest(
                "LaunchPad lp {} is not suitable for this test. Should be available and different "
                "from {}".format(lp, TESTDB_NAME))

        task = LpTask()
        # this will pass the lp
        fw1 = Firework([task],
                       spec={'_add_launchpad_and_fw_id': True},
                       fw_id=1)
        # this will not have the lp and should fail
        fw2 = Firework([task], spec={}, fw_id=2, parents=[fw1])
        wf = Workflow([fw1, fw2])
        self.lp.add_wf(wf)

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)

        fw = self.lp.get_fw_by_id(1)

        assert fw.state == "COMPLETED"

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)

        fw = self.lp.get_fw_by_id(2)

        assert fw.state == "FIZZLED"
Example #11
    def test_get_time_report_for_wf(self):
        task = PyTask(func="time.sleep", args=[0.5])
        fw1 = Firework([task],
                       spec={
                           'wf_task_index': "test1_1",
                           "nproc": 16
                       },
                       fw_id=1)
        fw2 = Firework([task],
                       spec={
                           'wf_task_index': "test2_1",
                           "nproc": 16
                       },
                       fw_id=2)
        wf = Workflow([fw1, fw2])
        self.lp.add_wf(wf)

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR)

        wf = self.lp.get_wf_by_fw_id(1)

        assert wf.state == "COMPLETED"

        tr = get_time_report_for_wf(wf)

        assert tr.n_fws == 2
        assert tr.total_run_time > 1
Example #12
    def test_parse_pass_rotate_write(self):

        input_file = "pt_gs_wb97mv_tz_initial.in"
        output_file = "pt_gs_wb97mv_tz_initial_1_job.out"
        calc_dir = os.path.join(module_dir, "..", "..", "test_files")

        p_task = QChemToDb(calc_dir=calc_dir,
                           input_file=input_file,
                           output_file=output_file,
                           db_file=">>db_file<<")
        fw1 = Firework([p_task])
        atom_indexes = [6, 8, 9, 10]
        angle = 90.0
        rot_task = RotateTorsion(atom_indexes=atom_indexes, angle=angle)
        w_task = WriteInputFromIOSet(qchem_input_set="OptSet",
                                     write_to_dir=module_dir)
        fw2 = Firework([rot_task, w_task], parents=fw1)
        wf = Workflow([fw1, fw2])

        self.lp.add_wf(wf)
        rapidfire(
            self.lp,
            fworker=FWorker(env={"db_file": os.path.join(db_dir, "db.json")}))

        test_mol = QCInput.from_file(os.path.join(module_dir,
                                                  "mol.qin")).molecule
        act_mol = Molecule.from_file(
            os.path.join(module_dir, "..", "..", "test_files",
                         "pt_rotated_90.0.xyz"))
        np.testing.assert_equal(act_mol.species, test_mol.species)
        np.testing.assert_allclose(act_mol.cart_coords,
                                   test_mol.cart_coords,
                                   atol=0.0001)
Example #13
def get_wf(job_name,
           lammps_input_set,
           input_filename="lammps.inp",
           lammps_bin="lammps",
           db_file=None,
           dry_run=False):
    """
    Returns workflow that writes lammps input/data files, runs lammps and inserts to DB.

    Args:
        job_name: job name
        lammps_input_set (DictLammpsInput): lammps input
        input_filename (string): input file name
        lammps_bin (string): path to the lammps binary
        db_file (string): path to the db file
        dry_run (bool): for test purposes, decides whether or not to run the lammps binary
            with the input file.

    Returns:
        Workflow

    """
    task1 = WriteLammpsFromIOSet(lammps_input_set=lammps_input_set,
                                 input_file=input_filename)
    if dry_run:
        lammps_cmd = lammps_bin
    else:
        lammps_cmd = lammps_bin + " -in " + input_filename
    task2 = RunLammpsDirect(lammps_cmd=lammps_cmd)
    task3 = LammpsToDBTask(lammps_input=lammps_input_set, db_file=db_file)
    fw1 = Firework([task1, task2, task3], name=job_name)
    return Workflow([fw1], name=job_name)
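A minimal usage sketch, assuming a DictLammpsInput named input_set prepared elsewhere and a local lammps binary (the job name, file name, and "lmp_serial" binary are illustrative):

from fireworks import LaunchPad

wf = get_wf("demo_lammps_job", input_set,
            input_filename="lammps.demo.inp",
            lammps_bin="lmp_serial",
            db_file=None,
            dry_run=True)
LaunchPad.auto_load().add_wf(wf)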
Example #14
def get_wf_molecules(molecules,
                     vasp_input_set=None,
                     db_file=None,
                     vasp_cmd="vasp",
                     name=""):
    """
    Args:
        molecules (Molecules): list of molecules to calculate
        vasp_input_set (DictSet): VaspInputSet for molecules
        db_file (string): database file path
        vasp_cmd (string): VASP command
        name (string): name for workflow

    Returns:
        workflow consisting of molecule calculations
    """
    fws = []

    for molecule in molecules:
        # molecule in box
        m_struct = molecule.get_boxed_structure(10,
                                                10,
                                                10,
                                                offset=np.array([5, 5, 5]))
        vis = vasp_input_set or MPSurfaceSet(m_struct)
        fws.append(
            OptimizeFW(structure=molecule,
                       job_type="normal",
                       vasp_input_set=vis,
                       db_file=db_file,
                       vasp_cmd=vasp_cmd))
    name = name or "molecules workflow"
    return Workflow(fws, name=name)
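A short usage sketch, assuming pymatgen Molecule objects read from hypothetical xyz files and env_chk placeholders resolved on the worker:

from fireworks import LaunchPad
from pymatgen.core import Molecule

molecules = [Molecule.from_file("h2o.xyz"), Molecule.from_file("nh3.xyz")]  # hypothetical inputs
wf = get_wf_molecules(molecules, vasp_cmd=">>vasp_cmd<<", db_file=">>db_file<<",
                      name="demo molecules")
LaunchPad.auto_load().add_wf(wf)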
Example #15
def main(sequencing_directory, library_prefix, num_libraries, raw_data_dir):
    lpad = LaunchPad(**yaml.safe_load(open("my_launchpad.yaml")))
    workflow_fireworks = []
    workflow_dependencies = collections.defaultdict(list)

    library_dirs = [
        os.path.join(sequencing_directory, library_prefix + str(i + 1))
        for i in range(num_libraries)
    ]
    subdirs = [
        'unzipped', 'trimmed', 'aligned_kallisto', 'bammed', 'sorted',
        'counted', 'pythonized'
    ]

    for library_dir in library_dirs:
        seq_functions.make_directories(library_dir, subdirs)

        name = "Count_%s" % os.path.basename(library_dir)
        fw_count = Firework(
            [
                CountTask(library_path=library_dir,
                          aligned_name="aligned_kallisto",
                          bammed_name="bammed",
                          counted_name="counted",
                          spikeids=['Spike1', 'Spike4', 'Spike7'])
            ],
            name=name,
            spec={"_queueadapter": {
                "job_name": name
            }},
        )
        workflow_fireworks.append(fw_count)

    lpad.add_wf(Workflow(workflow_fireworks, links_dict=workflow_dependencies))
Example #16
    def test_dagflow_cut(self):
        """ disconnected graph """
        wfl = Workflow([self.fw1, self.fw2, self.fw3], {self.fw1: self.fw2})
        msg = 'The workflow graph must be connected'
        with self.assertRaises(AssertionError) as context:
            DAGFlow.from_fireworks(wfl)
        self.assertTrue(msg in str(context.exception))
Example #17
def wf_creator(x):
    """
    The workflow creator function required by rocketsled.

    This wf_creator takes in an input vector x and returns a workflow which
    calculates y, the output. The requirements for using this wf_creator
    with rocketsled are:

    1. OptTask is passed into a FireWork in the workflow
    2. The fields "_x" and "_y" are written to the spec of the FireWork
        containing OptTask.
    3. You use MissionControl's "configure" method to set up the optimization,
        and pass in wf_creator as its first argument.

    Args:
        x (list): The wf_creator input vector. In this example, it is just 3
            integers between 1 and 5 (inclusive).

    Returns:
        (Workflow): A workflow containing one FireWork (two FireTasks) which
            is automatically set up to run the optimization loop.

    """
    spec = {'_x': x}
    # ObjectiveFuncTask writes _y field to the spec internally.
    firework1 = Firework([ObjectiveFuncTask(), OptTask(**db_info)], spec=spec)
    return Workflow([firework1])
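Per the docstring above, the optimization is wired up through rocketsled's MissionControl before the first workflow is launched. A hedged sketch of that setup, assuming db_info is the same {"launchpad": ..., "opt_label": ...} dict used by OptTask above and dimension bounds matching the 3-integer input:

from rocketsled import MissionControl

mc = MissionControl(**db_info)
mc.configure(wf_creator=wf_creator, dimensions=[(1, 5), (1, 5), (1, 5)])
# seed the optimization loop with an initial guess, then launch with rapidfire
db_info["launchpad"].add_wf(wf_creator([3, 1, 2]))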
Example #18
    def test_run(self):
        db = DatabaseData(self.lp.name,
                          collection="test_MongoEngineDBInsertionTask",
                          username=self.lp.username,
                          password=self.lp.password)
        task = MongoEngineDBInsertionTask(db)
        fw = Firework([task], fw_id=1, spec={"_add_launchpad_and_fw_id": True})
        wf = Workflow(
            [fw],
            metadata={
                'workflow_class': SaveDataWorkflow.workflow_class,
                'workflow_module': SaveDataWorkflow.workflow_module
            })
        self.lp.add_wf(wf)

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)

        wf = self.lp.get_wf_by_fw_id(1)

        assert wf.state == "COMPLETED"

        # retrieve the saved object
        # the import has to be local, otherwise it fails
        from abiflows.fireworks.tasks.tests.mock_objects import DataDocument
        db.connect_mongoengine()
        with db.switch_collection(DataDocument) as DataDocument:
            data = DataDocument.objects()

            assert len(data) == 1

            assert data[0].test_field_string == "test_text"
            assert data[0].test_field_int == 5
Example #19
File: ftasks.py Project: imkimhy/dfttk
    def run_task(self, fw_spec):
        continuation = self.get('continuation', False)
        # TODO: detour the firework pending the result
        c = Custodian([ATATWalltimeHandler()],
                      [ATATInfDetJob(continuation=continuation)],
                      monitor_freq=1,
                      polling_time_step=300)
        cust_result = c.run()

        if len(cust_result[0]['corrections']) > 0:
            # we hit the walltime handler, detour another ID Firework
            os.remove('stop')
            from dfttk.fworks import InflectionDetectionFW
            from fireworks import Workflow
            # we have to add the calc locs for this calculation by hand
            # because the detour action seems to disable spec mods
            infdet_wf = Workflow([
                InflectionDetectionFW(Structure.from_file('POSCAR'),
                                      continuation=True,
                                      spec={
                                          'calc_locs':
                                          extend_calc_locs(
                                              self.get('name', 'InfDet'),
                                              fw_spec)
                                      })
            ])
            return FWAction(detours=[infdet_wf])
Example #20
    def run_task(self, fw_spec):
        rbpf_batch = []
        if fw_spec['coord_ascent_iter'] > 0:
            assert('mod_direction' in fw_spec)
            fw_spec['results_folder'] = "%s/iterID_%d_dir-%s"%(fw_spec['results_folder'], fw_spec['coord_ascent_iter'],
                                                               fw_spec['mod_direction'])
        else:
            fw_spec['results_folder'] = "%s/iterID_%d"%(fw_spec['results_folder'], fw_spec['coord_ascent_iter'])

        setup_results_folder(fw_spec['results_folder'])
        rbpf_batch = []
        for run_idx in range(1, fw_spec['NUM_RUNS']+1):
            for seq_idx in fw_spec['TRAINING_SEQUENCES']:
                cur_spec = copy.deepcopy(fw_spec)
                cur_spec['run_idx'] = run_idx
                cur_spec['seq_idx'] = seq_idx
#                Q_idx = fw_spec['Q_idx']
#                if fw_spec['mod_direction'] == 'inc':
#                    cur_spec['Q'][Q_idx//4][Q_idx%4] += cur_spec['Q'][Q_idx//4][Q_idx%4]*fw_spec['mod_percent']/100.0
#                elif fw_spec['mod_direction'] == 'dec':
#                    cur_spec['Q'][Q_idx//4][Q_idx%4] -= cur_spec['Q'][Q_idx//4][Q_idx%4]*fw_spec['mod_percent']/100.0
#                else:
#                    assert(fw_spec['mod_direction'] == 'const')
                cur_firework = Firework(RunRBPF(), spec=cur_spec)
                rbpf_batch.append(cur_firework)

        parallel_workflow = Workflow(rbpf_batch)
        return FWAction(detours=parallel_workflow, mod_spec=[{'_set': {"results_folder": fw_spec['results_folder']}}])
Example #21
def wf_single_fit(fworker, fit_name, pipe_config, name, df, target, tags=None):
    """
    Submit a dataset to be fit for a single pipeline (i.e., to train on a
    dataset for real predictions).
    """

    # TODO: this is probably not working
    warnings.warn("Single fitted MatPipe not being stored in automatminer db "
                  "collections. Please consult fw_spec to find the benchmark "
                  "on {}".format(fworker))
    if fworker not in VALID_FWORKERS:
        raise ValueError("fworker must be in {}".format(VALID_FWORKERS))

    data_file = None

    now = get_time_str()
    base_save_dir = now + "_single_fit"

    spec = {
        "pipe_config": pipe_config,
        "base_save_dir": base_save_dir,
        "data_file": data_file,
        "target": target,
        "automatminer_commit": get_last_commit(),
        "tags": tags if tags else [],
        "_fworker": fworker,
    }

    fw_name = "{} single fit".format(name)
    wf_name = "single fit: {} ({}) [{}]".format(name, fit_name, fworker)

    fw = Firework(RunSingleFit(), spec=spec, name=fw_name)
    wf = Workflow([fw], metadata={"tags": tags}, name=wf_name)
    return wf
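As with the other builders, the returned Workflow is simply added to a LaunchPad. A sketch, under the assumption that "local" is one of the VALID_FWORKERS and that pipe_config/df come from the surrounding automatminer setup:

from fireworks import LaunchPad

# pipe_config and df are assumed to be prepared by the surrounding automatminer code
wf = wf_single_fit("local", "expt_gap_fit", pipe_config, "expt_gap", df, "gap expt")
LaunchPad.auto_load().add_wf(wf)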
Example #22
def get_wf_FFopt_and_critic(molecule,
                            suffix,
                            qchem_input_params=None,
                            db_file=">>db_file<<",
                            **kwargs):
    """
    """

    # FFopt
    fw1 = FrequencyFlatteningOptimizeFW(
        molecule=molecule,
        name="{}:{}".format(molecule.composition.alphabetical_formula,
                            "FFopt_" + suffix),
        qchem_cmd=">>qchem_cmd<<",
        max_cores=">>max_cores<<",
        qchem_input_params=qchem_input_params,
        linked=True,
        db_file=">>db_file<<")

    # Critic
    fw2 = CubeAndCritic2FW(name="{}:{}".format(
        molecule.composition.alphabetical_formula, "CC2_" + suffix),
                           qchem_cmd=">>qchem_cmd<<",
                           max_cores=">>max_cores<<",
                           qchem_input_params=qchem_input_params,
                           db_file=">>db_file<<",
                           parents=fw1)
    fws = [fw1, fw2]

    wfname = "{}:{}".format(molecule.composition.alphabetical_formula,
                            "FFopt_CC2_WF_" + suffix)

    return Workflow(fws, name=wfname, **kwargs)
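Usage follows the same pattern as the other Q-Chem workflow builders; a minimal sketch, assuming a pymatgen Molecule read from a hypothetical xyz file and env_chk-resolved commands on the worker:

from fireworks import LaunchPad
from pymatgen.core import Molecule

mol = Molecule.from_file("ethylene_carbonate.xyz")  # hypothetical input file
wf = get_wf_FFopt_and_critic(mol, "EC_neutral")
LaunchPad.auto_load().add_wf(wf)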
Example #23
def wf_creator_basic(x):
    """Testing a basic workflow with one Firework, and two FireTasks."""
    spec = {'_x': x}
    bt = BasicTestTask()
    ot = OptTask(**db_info)
    firework1 = Firework([bt, ot], spec=spec)
    return Workflow([firework1])
Example #24
def wf_single_fit(fworker, fit_name, pipe_config, name, data_pickle, target, *args,
                  tags=None, **kwargs):
    """
    Submit a dataset to be fit for a single pipeline (i.e., to train on a
    dataset for real predictions).
    """
    check_pipe_config(pipe_config)
    warnings.warn("Single fitted MatPipe not being stored in automatminer db "
                  "collections. Please consult fw_spec to find the benchmark "
                  "on {}".format(fworker))
    if fworker not in valid_fworkers:
        raise ValueError("fworker must be in {}".format(valid_fworkers))

    base_save_dir = get_time_str() + "_single_fit"

    spec = {
        "pipe_config": pipe_config,
        "base_save_dir": base_save_dir,
        "data_pickle": data_pickle,
        "target": target,
        "automatminer_commit": get_last_commit(),
        "tags": tags if tags else [],
        "_fworker": fworker
    }

    fw_name = "{} single fit".format(name)
    wf_name = "single fit: {} ({}) [{}]".format(name, fit_name, fworker)

    fw = Firework(RunSingleFit(), spec=spec, name=fw_name)
    wf = Workflow([fw], metadata={"tags": tags}, name=wf_name)
    return wf
Example #25
def get_sparc_lattice_optimizations(structures,
                                    parameters={},
                                    sparc_command=None,
                                    to_db=True,
                                    psuedo_potentials_path=None,
                                    identifiers=None):
    fws = []
    # If a single parameters dict is given, use the same parameters for all structures
    if not isinstance(parameters, list):
        parameters = [parameters] * len(structures)
    # Likewise broadcast a single identifier (or None) to all structures
    if not isinstance(identifiers, list):
        identifiers = [identifiers] * len(structures)

    for struct, param, identifier in zip(structures, parameters, identifiers):
        name = struct.get_chemical_formula()
        fws.append(
            OptimizeLatticeSPARC(atoms_dict(struct),
                                 param,
                                 sparc_command=sparc_command,
                                 psuedo_potentials_path=psuedo_potentials_path,
                                 identifier=identifier,
                                 to_db=to_db))
    return Workflow(fws, name="{} tests wf, e.g.,".format(len(fws)))
Example #26
def get_wf_neb(directory,
               nimages=7,
               functional=("pbe", {}),
               is_metal=False,
               in_custodian=False,
               number_nodes=None):
    """
    Set up a workflow that calculates the kinetic barrier between two geometries.

    # TODO
    TEMPORARY? Should NEB be integrated in other workflows? If so, should we still
    have a separate NEB workflow?

    Args:
        directory (str): Directory in which the NEB calculation should be performed.
        nimages (int): Number of images to use for the NEB calculation.
        functional (tuple): Tuple with the functional details. The first element
            contains a string that indicates the functional used ("pbe", "hse", ...),
            whereas the second element contains a dictionary that allows the user
            to specify the various functional tags.
        is_metal (bool): Flag that indicates the material being studied is a
            metal, which changes the smearing from Gaussian to second order
            Methfessel-Paxton of 0.2 eV. Defaults to False.
        in_custodian (bool): Flag that indicates that the calculations
            should be run within a Custodian. Defaults to False.
        number_nodes (int): Number of nodes that should be used for the calculations.
            Is required to add the proper `_category` to the Firework generated, so
            it is picked up by the right Fireworker. Defaults to the number of images.

    """
    directory = os.path.abspath(directory)

    # If no number of nodes is specified, take the number of images
    if number_nodes is None:
        number_nodes = nimages

    # Create the Firework that sets up and runs the NEB
    neb_firework = NebFirework(directory=directory,
                               nimages=nimages,
                               functional=functional,
                               is_metal=is_metal,
                               in_custodian=in_custodian,
                               number_nodes=number_nodes)

    # Add number of nodes to spec, or "none"
    firework_spec = {}
    if number_nodes is None:
        firework_spec.update({"_category": "none"})
    else:
        firework_spec.update({"_category": str(number_nodes) + "nodes"})

    cathode = Cathode.from_file(
        os.path.join(directory, "final", "initial_cathode.json"))
    dir_name = os.path.abspath(directory).split("/")[-1]
    workflow_name = str(cathode.composition).replace(" ", "") + " " + dir_name

    return Workflow(fireworks=[
        neb_firework,
    ], name=workflow_name)
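A usage sketch, assuming a directory prepared for an NEB run as described in the docstring:

from fireworks import LaunchPad

# "neb_dir" is a hypothetical directory containing the prepared NEB geometries
wf = get_wf_neb("neb_dir", nimages=7, functional=("pbe", {}), in_custodian=True)
LaunchPad.auto_load().add_wf(wf)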
Example #27
    def test_dagflow_link(self):
        """ wrong links """
        wfl = Workflow([self.fw1, self.fw2, self.fw3],
                       {self.fw1: [self.fw2, self.fw3]})
        msg = 'An input field must have exactly one source'
        with self.assertRaises(AssertionError) as context:
            DAGFlow.from_fireworks(wfl)
        self.assertTrue(msg in str(context.exception))
Example #28
    def test_add_wfs(self):
        ftask = ScriptTask.from_str('echo "lorem ipsum"')
        wfs = []
        for _ in range(50):
            # Add two workflows with 3 and 5 simple fireworks
            wf3 = Workflow([Firework(ftask, name='lorem') for _ in range(3)],
                           name='lorem wf')
            wf5 = Workflow([Firework(ftask, name='lorem') for _ in range(5)],
                           name='lorem wf')
            wfs.extend([wf3, wf5])
        self.lp.bulk_add_wfs(wfs)
        num_fws_total = sum([len(wf.fws) for wf in wfs])
        distinct_fw_ids = self.lp.fireworks.distinct('fw_id',
                                                     {'name': 'lorem'})
        self.assertEqual(len(distinct_fw_ids), num_fws_total)
        num_wfs_in_db = len(self.lp.get_wf_ids({"name": "lorem wf"}))
        self.assertEqual(num_wfs_in_db, len(wfs))
Example #29
def get_workflow(fireworks=None, name=None):
    """
    Returns a workflow consisting of n fireworks, with each firework
    depending on the previous one so that they run in sequence.
    """

    # link firework i to firework i+1 so the chain runs strictly in order
    return Workflow(fireworks,
                    links_dict={fireworks[i].fw_id: fireworks[i + 1].fw_id
                                for i in range(len(fireworks) - 1)},
                    name=name)
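A quick sketch chaining three ScriptTask Fireworks with the helper above (the echo commands are illustrative):

from fireworks import Firework, LaunchPad, ScriptTask

fireworks = [Firework(ScriptTask.from_str('echo "step %d"' % i), name="step_%d" % i)
             for i in range(3)]
wf = get_workflow(fireworks, name="sequential demo")
LaunchPad.auto_load().add_wf(wf)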
Example #30
File: ftasks.py Project: ghifi37/dfttk
    def run_task(self, fw_spec):
        # unrelaxed cell
        cell = Structure.from_file('POSCAR')
        cell.to(filename='str.out', fmt='mcsqs')

        # relaxed cell
        cell = Structure.from_file('CONTCAR')
        cell.to(filename='str_relax.out', fmt='mcsqs')

        # check the symmetry
        out = subprocess.run(['checkrelax', '-1'], stdout=subprocess.PIPE)
        relaxation = float(out.stdout)

        # if the structure relaxed too much, add a volume relax and an inflection detection WF as a detour
        if relaxation > self['tolerance']:
            from dfttk.fworks import OptimizeFW, InflectionDetectionFW
            from fireworks import Workflow
            from dfttk.input_sets import RelaxSet
            from dfttk.utils import add_modify_incar_by_FWname, add_modify_kpoints_by_FWname

            fws = []
            vis = RelaxSet(self.get('structure'), volume_relax=True)
            vol_relax_fw = OptimizeFW(
                self.get('structure'),
                symmetry_tolerance=None,
                job_type='normal',
                name='Volume relax',  #record_path = True, 
                vasp_input_set=vis,
                modify_incar={'ISIF': 7},
                vasp_cmd=self.get('vasp_cmd'),
                db_file=self.get('db_file'),
                metadata=self.get('metadata'),
            )
            fws.append(vol_relax_fw)

            modify_incar_params = self.get('modify_incar_params')
            modify_kpoints_params = self.get('modify_kpoints_params')

            # we have to add the calc locs for this calculation by hand
            # because the detour action seems to disable spec mods
            fws.append(
                InflectionDetectionFW(
                    self.get('structure'),
                    parents=[vol_relax_fw],
                    Pos_Shape_relax=self.get('Pos_Shape_relax') or False,
                    metadata=self.get('metadata'),
                    db_file=self.get('db_file'),
                    spec={
                        'calc_locs':
                        extend_calc_locs(self.get('name', 'Full relax'),
                                         fw_spec)
                    }))
            infdet_wf = Workflow(fws)
            add_modify_incar_by_FWname(infdet_wf,
                                       modify_incar_params=modify_incar_params)
            add_modify_kpoints_by_FWname(
                infdet_wf, modify_kpoints_params=modify_kpoints_params)
            return FWAction(detours=[infdet_wf])