Example #1
def get_launchpad(launchpad_file=None):
    """
    Returns a LaunchPad object. If launchpad_file is None, try to auto-load from the environment.

    Args:
        launchpad_file (file-like or str): A file-like object or a file path to the LaunchPad file.

    Returns:
        LaunchPad
    """
    if launchpad_file:
        if hasattr(launchpad_file, 'read'):
            # a file-like object was passed
            ext = launchpad_file.name.split('.')[-1]
            if ext == 'yaml':
                launchpad = LaunchPad.from_format(launchpad_file.read(),
                                                  f_format='yaml')
            else:
                # assume json
                launchpad = LaunchPad.from_format(launchpad_file.read())
        else:
            # assume launchpad_file is a path
            launchpad = LaunchPad.from_file(launchpad_file)
    else:
        launchpad = LaunchPad.auto_load()
    return launchpad
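
A minimal usage sketch for the helper above (hedged: "my_launchpad.yaml" is a placeholder file name, and the auto_load() fallback assumes a standard FireWorks configuration is present):

# Load from an explicit path (placeholder file name).
lp = get_launchpad("my_launchpad.yaml")

# Or pass an open file object; the extension selects the parser.
with open("my_launchpad.yaml") as f:
    lp = get_launchpad(f)

# Or fall back to auto_load(), which reads the FireWorks configuration.
lp = get_launchpad()
print(lp.get_wf_ids())  # quick sanity check that the connection works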
Example #2
    def test_get_lp_and_fw_id_from_task(self):
        """
        Tests get_lp_and_fw_id_from_task. This test relies on the LaunchPad loaded from auto_load
        being different from the one defined by TESTDB_NAME. If this is not the case, the test is skipped.
        """
        lp = LaunchPad.auto_load()

        if not lp or lp.db.name == TESTDB_NAME:
            raise unittest.SkipTest("LaunchPad lp {} is not suitable for this test. Should be available and different"
                                    "from {}".format(lp, TESTDB_NAME))

        task = LpTask()
        # this will pass the lp
        fw1 = Firework([task], spec={'_add_launchpad_and_fw_id': True}, fw_id=1)
        # this will not have the lp and should fail
        fw2 = Firework([task], spec={}, fw_id=2, parents=[fw1])
        wf = Workflow([fw1, fw2])
        self.lp.add_wf(wf)

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)

        fw = self.lp.get_fw_by_id(1)

        assert fw.state == "COMPLETED"

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)

        fw = self.lp.get_fw_by_id(2)

        assert fw.state == "FIZZLED"
Example #3
    def __init__(self, *args, **kwargs):
        super(OptTask, self).__init__(*args, **kwargs)

        # Configuration attrs
        lp = self.get("launchpad", LaunchPad.auto_load())
        if isinstance(lp, LaunchPad):
            lp = lp.to_dict()
        self.lpad = LaunchPad.from_dict(lp)
        self.opt_label = self.get("opt_label", "opt_default")
        self.c = getattr(self.lpad.db, self.opt_label)
        self.config = self.c.find_one({"doctype": "config"})
        if self.config is None:
            raise NotConfiguredError("Please use MissionControl().configure to "
                                     "configure the optimization database "
                                     "({} - {}) before running OptTask."
                                     "".format(self.lpad.db, self.opt_label))
        self.wf_creator = deserialize(self.config["wf_creator"])
        self.x_dims = self.config["dimensions"]
        self._xdim_types = self.config["dim_types"]
        self.is_discrete_all = self.config["is_discrete_all"]
        self.is_discrete_any = self.config["is_discrete_any"]
        self.wf_creator_args = self.config["wf_creator_args"] or []
        self.wf_creator_kwargs = self.config["wf_creator_kwargs"] or {}
        self.predictor = self.config["predictor"]
        self.predictor_args = self.config["predictor_args"] or []
        self.predictor_kwargs = self.config["predictor_kwargs"] or {}
        self.maximize = self.config["maximize"]
        self.n_search_pts = self.config["n_search_pts"]
        self.n_train_pts = self.config["n_train_pts"]
        self.n_bootstraps = self.config["n_bootstraps"]
        self.acq = self.config["acq"]
        self.space_file = self.config["space_file"]
        self.onehot_categorical = self.config["onehot_categorical"]
        self.duplicate_check = self.config["duplicate_check"]
        self.get_z = self.config["get_z"]
        if self.get_z:
            self.get_z = deserialize(self.config['get_z'])
        else:
            self.get_z = lambda *ars, **kws: []
        self.get_z_args = self.config["get_z_args"] or []
        self.get_z_kwargs = self.config["get_z_kwargs"] or {}
        self.z_file = self.config["z_file"]
        self.enforce_sequential = self.config["enforce_sequential"]
        self.tolerances = self.config["tolerances"]
        self.batch_size = self.config["batch_size"]
        self.timeout = self.config["timeout"]

        # Declared attrs
        self.n_objs = None
        plist = [RandomForestRegressor, GaussianProcessRegressor,
                 ExtraTreesRegressor, GradientBoostingRegressor]
        self.builtin_predictors = {p.__name__: p for p in plist}
        self._n_cats = 0
        self._encoding_info = []

        # Query formats
        self._completed = {'x': {'$exists': 1}, 'y': {'$exists': 1,
                                                      '$ne': 'reserved'},
                           'z': {'$exists': 1}}
        self._manager = {'lock': {'$exists': 1}, 'queue': {'$exists': 1}}
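
A hedged sketch of how the query formats above might be used against the optimization collection; the helper below is illustrative and not part of the original class:

def opt_status(task):
    """Hypothetical helper: summarize an OptTask's optimization collection."""
    n_completed = task.c.count_documents(task._completed)
    has_manager = task.c.find_one(task._manager) is not None
    return n_completed, has_manager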
Example #4
    def test_get_lp_and_fw_id_from_task(self):
        """
        Tests get_lp_and_fw_id_from_task. This test relies on the LaunchPad loaded from auto_load
        being different from the one defined by TESTDB_NAME. If this is not the case, the test is skipped.
        """
        lp = LaunchPad.auto_load()

        if not lp or lp.db.name == TESTDB_NAME:
            raise unittest.SkipTest(
                "LaunchPad lp {} is not suitable for this test. Should be available and different"
                "from {}".format(lp, TESTDB_NAME))

        task = LpTask()
        # this will pass the lp
        fw1 = Firework([task],
                       spec={'_add_launchpad_and_fw_id': True},
                       fw_id=1)
        # this will not have the lp and should fail
        fw2 = Firework([task], spec={}, fw_id=2, parents=[fw1])
        wf = Workflow([fw1, fw2])
        self.lp.add_wf(wf)

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)

        fw = self.lp.get_fw_by_id(1)

        assert fw.state == "COMPLETED"

        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)

        fw = self.lp.get_fw_by_id(2)

        assert fw.state == "FIZZLED"
Example #5
    def _setup_db(self, fw_spec):
        """
        Sets up a MongoDB database for storing optimization data.

        Args:
            fw_spec (dict): The spec of the Firework which contains this Firetask.

        Returns:
            None
        """

        # TODO: @ardunn - doesn't look like this process will work with a password-protected LaunchPad. Most people have their FWS databases password-protected.  - AJ

        opt_label = self['opt_label'] if 'opt_label' in self else 'opt_default'
        db_reqs = ('host', 'port', 'name')
        db_defined = [req in self for req in db_reqs]

        if all(db_defined):
            host, port, name = [self[k] for k in db_reqs]

        elif any(db_defined):
            raise AttributeError("Host, port, and name must all be specified!")

        elif 'lpad' in self:
            lpad = self['lpad']
            host, port, name = [lpad[req] for req in db_reqs]

        elif '_add_launchpad_and_fw_id' in fw_spec:
            if fw_spec['_add_launchpad_and_fw_id']:
                try:
                    host, port, name = [getattr(self.launchpad, req) for req in db_reqs]

                except AttributeError:
                    # launchpad tried to get attributes of a multiprocessing proxy object.
                    raise Exception("_add_launchpad_and_fw_id is currently working with parallel workflows.")  # TODO: @ardunn - is this still an issue? - AJ

        else:
            try:
                host, port, name = [getattr(LaunchPad.auto_load(), req) for req in db_reqs]

            except AttributeError:
                # auto_load did not return any launchpad object, so nothing was defined.
                raise AttributeError("The optimization database must be specified explicitly (with host, port, and "
                                     "name), with Launchpad object (lpad), by setting _add_launchpad_and_fw_id to True "
                                     "in the fw_spec, or by defining LAUNCHPAD_LOC in fw_config.py for "
                                     "LaunchPad.auto_load()")  # TODO: @ardunn - LAUNCHPAD_LOC is typically not set through fw_config.py (that requires modifying FWS source code), it's set through a config file: https://materialsproject.github.io/fireworks/config_tutorial.html  - AJ

        mongo = MongoClient(host, port)
        db = getattr(mongo, name)
        self.collection = getattr(db, opt_label)

        # TODO: @ardunn - put the below in a different function, e.g. "set_queries"(?) Or just put in root level of run_task  - AJ
        # TODO: @ardunn - document what these queries are better. Also, instead of "format" maybe call it "query", e.g. self._explored_query  - AJ
        x = fw_spec['_x_opt']
        self._explored_format = {'x': {'$exists': 1}, 'yi': {'$ne': [], '$exists': 1}, 'z': {'$exists': 1}}
        self._unexplored_inclusive_format = {'x': {'$exists': 1}, 'yi': {'$exists': 0}}
        self._unexplored_noninclusive_format = {'x': {'$ne': x, '$exists': 1}, 'yi': {'$exists': 0}}
        self._manager_format = {'lock': {'$exists': 1}, 'queue': {'$exists': 1}}
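
Regarding the TODO above about password-protected LaunchPads, a hedged sketch of how authentication could be threaded into the connection step; the username/password parameters are assumptions for illustration, not part of the original Firetask:

from pymongo import MongoClient

def connect_opt_collection(host, port, name, opt_label,
                           username=None, password=None):
    # Hypothetical variant of the MongoClient call in _setup_db that also
    # authenticates against the optimization database when credentials exist.
    if username and password:
        mongo = MongoClient(host, port, username=username,
                            password=password, authSource=name)
    else:
        mongo = MongoClient(host, port)
    return mongo[name][opt_label]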
Example #6
    def add_workflow(workflow):
        """
        Use Fireworks to add a generated workflow.

        :param workflow: a Workflow object (should have been generated by one
        of the workflow-generating functions above).
        :return: None
        """

        launchpad = LaunchPad.auto_load()
        launchpad.add_wf(workflow)
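
A short usage sketch, assuming the add_workflow helper above is reachable as a plain function (it appears to be defined inside a class); build_hello_workflow is a placeholder standing in for one of the workflow-generating functions referred to in the docstring:

from fireworks import Firework, ScriptTask, Workflow

def build_hello_workflow():
    # Placeholder generator: a one-Firework workflow that echoes a message.
    fw = Firework([ScriptTask.from_str('echo "hello"')], name="hello")
    return Workflow([fw], name="hello_wf")

add_workflow(build_hello_workflow())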
Example #7
    def __init__(self,
                 source,
                 materials,
                 wf_function,
                 material_filter=None,
                 lpad=None,
                 **kwargs):
        """
        Adds workflows to a launchpad based on material inputs.
        This is primarily intended for derivative property
        workflows, but could in principle be used to generate
        any workflow that can be invoked from structure data.

        Args:
            source (Store): store of properties
            materials (Store): Store of materials properties
            material_filter (dict): dict filter for getting items to process
                e. g. {"elasticity": None}
            wf_function (string or method): method to generate a workflow
                based on structure in document with missing property
                can be a string to be loaded or a custom method.
                Note that the builder/runner will not be serializable
                with custom methods.
            lpad (LaunchPad or dict): fireworks launchpad to use for adding
                workflows, can either be None (autoloaded), a LaunchPad
                instance, or a dict from which the LaunchPad will be constructed
            **kwargs (kwargs): kwargs for builder
        """
        self.source = source
        self.materials = materials
        # Will this be pickled properly for multiprocessing? could just put
        # it into the processor if that's the case
        if isinstance(wf_function, six.string_types):
            self.wf_function = load_class(*wf_function.rsplit('.', 1))
            self._wf_function_string = wf_function
        elif callable(wf_function):
            self.wf_function = wf_function
            self._wf_function_string = None
        else:
            raise ValueError("wf_function must be callable or a string "
                             "corresponding to a loadable method")
        self.material_filter = material_filter
        if lpad is None:
            self.lpad = LaunchPad.auto_load()
        elif isinstance(lpad, dict):
            self.lpad = LaunchPad.from_dict(lpad)
        else:
            self.lpad = lpad

        super().__init__(sources=[source, materials], targets=[], **kwargs)
Example #8
    def test_config(self):
        lpad = LaunchPad.auto_load()
        self.assertEqual(lpad.fireworks.database.name, "mp_workshop")
Example #9
    def run_task(self, fw_spec):
        '''
        run_num: maximum number of appended VASP runs; this limit avoids endless reruns caused by bad settings;
            for internal use only;

        Important args:
        tolerance: acceptable value for the average RMS; >= 0.005 recommended;
        threshold: if the total number of points exceeds this value, the set is reduced; < 16 recommended, otherwise runs take much longer;
        del_limited: maximum deletion ratio for large result sets;
        vol_spacing: the maximum ratio step between two volumes; extra points are inserted where the step is larger;
        '''
        max_run = 10
        deformations = self.get('deformations') or []
        db_file = self['db_file']
        tag = self['tag']
        vasp_cmd = self['vasp_cmd']
        metadata = self['metadata']
        relax_path = self['relax_path'] or ''
        structure = self.get('structure') or None
        run_num = self.get('run_num') or 0
        tolerance = self.get('tolerance') or 0.005
        threshold = self.get('threshold') or 14
        del_limited = self.get('del_limited') or 0.3
        vol_spacing = self.get('vol_spacing') or 0.05
        t_min = self.get('t_min') or 5
        t_max = self.get('t_max') or 2000
        t_step = self.get('t_step') or 5
        phonon = self.get('phonon') or False
        phonon_supercell_matrix = self.get('phonon_supercell_matrix') or None
        verbose = self.get('verbose') or False
        modify_incar_params = self.get('modify_incar_params') or {}
        modify_kpoints_params = self.get('modify_kpoints_params') or {}
        powerups_options = modify_incar_params.get('powerups', None)
        symmetry_tolerance = self.get('symmetry_tolerance') or None
        run_isif2 = self.get('run_isif2') or None
        pass_isif4 = self.get('pass_isif4') or False
        site_properties = self.get('site_properties') or None
        store_volumetric_data = self.get('store_volumetric_data', False)
        run_num += 1

        volumes, energies = self.get_orig_EV_structure(db_file, tag)
        self.check_points(db_file, metadata, tolerance, 0.1, del_limited,
                          volumes, energies, verbose)

        EVcheck_result = init_evcheck_result(run_num, self.correct, volumes,
                                             energies, tolerance, threshold,
                                             vol_spacing, self.error, metadata)

        structure.scale_lattice(self.minE_value)
        if site_properties:
            for pkey in site_properties:
                structure.add_site_property(pkey, site_properties[pkey])
        vol_orig = structure.volume
        volume, energy = gen_volenergdos(self.points, volumes, energies)
        vol_adds = self.check_vol_coverage(volume, vol_spacing, vol_orig,
                                           run_num, energy, structure, phonon,
                                           db_file, tag, t_min, t_max, t_step,
                                           EVcheck_result)  # Normalized to 1
        if self.correct or len(vol_adds) > 0:
            EVcheck_result['sellected'] = volume
            EVcheck_result['minE_value'] = self.minE_value
            EVcheck_result['append'] = (vol_adds).tolist()
            # Marked as adopted in db
            lpad = LaunchPad.auto_load()
            fws = []
            if len(vol_adds) > 0:  # VASP calculations need to be appended
                if run_num < max_run:
                    # Do VASP and check again
                    print('Appending PreStatic of : %s to calculate in VASP!' %
                          (vol_adds * vol_orig).tolist())

                    fws = []
                    prestatic_calcs = []
                    vis_prestatic = PreStaticSet(structure)
                    for vol_add in vol_adds:
                        prestatic = StaticFW(
                            structure=structure,
                            job_type='normal',
                            name='VR_%.3f-PreStatic' % vol_add,
                            prev_calc_loc=False,
                            vasp_input_set=vis_prestatic,
                            vasp_cmd=">>vasp_cmd<<",
                            db_file=self.get('db_file', DB_FILE),
                            metadata=metadata,
                            Prestatic=True)
                        fws.append(prestatic)
                        prestatic_calcs.append(prestatic)

                    check_result = Firework(
                        PreEV_check(
                            db_file=self.get('db_file', DB_FILE),
                            tag=tag,
                            relax_path=relax_path,
                            deformations=deformations,
                            run_isif2=run_isif2,
                            tolerance=tolerance,
                            threshold=14,
                            vol_spacing=vol_spacing,
                            vasp_cmd=">>vasp_cmd<<",
                            pass_isif4=pass_isif4,
                            metadata=metadata,
                            t_min=t_min,
                            t_max=t_max,
                            t_step=t_step,
                            phonon=phonon,
                            symmetry_tolerance=symmetry_tolerance,
                            phonon_supercell_matrix=phonon_supercell_matrix,
                            verbose=verbose,
                            site_properties=site_properties,
                            modify_incar_params=modify_incar_params,
                            modify_kpoints_params=modify_kpoints_params),
                        parents=prestatic_calcs,
                        name='%s-PreEV_check%s' %
                        (structure.composition.reduced_formula, run_num))
                    fws.append(check_result)
                    strname = "{}:{}".format(
                        structure.composition.reduced_formula, 'PreEV_check')
                    wfs = Workflow(fws, name=strname, metadata=metadata)
                    if modify_incar_params != {}:
                        from dfttk.utils import add_modify_incar_by_FWname
                        add_modify_incar_by_FWname(
                            wfs, modify_incar_params=modify_incar_params)
                    if modify_kpoints_params != {}:
                        from dfttk.utils import add_modify_kpoints_by_FWname
                        add_modify_kpoints_by_FWname(
                            wfs, modify_kpoints_params=modify_kpoints_params)
                    wfs = Customizing_Workflows(
                        wfs, powerups_options=powerups_options)
                    lpad.add_wf(wfs)
                else:
                    too_many_run_error()
            else:  # No more VASP calculations needed; QHA can run
                relax_path, run_isif2, pass_isif4 = check_relax_path(
                    relax_path, db_file, tag, run_isif2, pass_isif4)
                if relax_path == '':
                    print(
                        'Success in PreStatic calculations, entering Position relax ...'
                    )
                    vis_relax = RelaxSet(structure)
                    ps2_relax_fw = OptimizeFW(
                        structure,
                        symmetry_tolerance=symmetry_tolerance,
                        job_type='normal',
                        name='MinE V=%.3f relax' % vol_orig,
                        prev_calc_loc=False,
                        vasp_input_set=vis_relax,
                        vasp_cmd=">>vasp_cmd<<",
                        db_file=self.get('db_file', DB_FILE),
                        metadata=metadata,
                        record_path=True,
                        modify_incar={'ISIF': 2},
                        run_isif2=run_isif2,
                        pass_isif4=pass_isif4,
                        modify_incar_params=modify_incar_params,
                        modify_kpoints_params=modify_kpoints_params,
                        spec={'_preserve_fworker': True},
                        store_volumetric_data=store_volumetric_data)
                    fws.append(ps2_relax_fw)
                else:
                    print(
                        'Initial setting found, entering static calculations ...')
                    ps2_relax_fw = None
                check_result = Firework(
                    EVcheck_QHA(
                        db_file=self.get('db_file', DB_FILE),
                        tag=tag,
                        relax_path=relax_path,
                        tolerance=tolerance,
                        run_isif2=run_isif2,
                        threshold=threshold,
                        vol_spacing=vol_spacing,
                        vasp_cmd=">>vasp_cmd<<",
                        run_num=run_num,
                        metadata=metadata,
                        t_min=t_min,
                        t_max=t_max,
                        t_step=t_step,
                        phonon=phonon,
                        deformations=deformations,
                        phonon_supercell_matrix=phonon_supercell_matrix,
                        symmetry_tolerance=symmetry_tolerance,
                        modify_incar_params=modify_incar_params,
                        verbose=verbose,
                        pass_isif4=pass_isif4,
                        modify_kpoints_params=modify_kpoints_params,
                        site_properties=site_properties),
                    parents=ps2_relax_fw,
                    name='%s-EVcheck_QHA' %
                    structure.composition.reduced_formula,
                    store_volumetric_data=store_volumetric_data)
                fws.append(check_result)
                strname = "{}:{}".format(structure.composition.reduced_formula,
                                         'prePS2_Relax')
                wfs = Workflow(fws, name=strname, metadata=metadata)
                if modify_incar_params != {}:
                    from dfttk.utils import add_modify_incar_by_FWname
                    add_modify_incar_by_FWname(
                        wfs, modify_incar_params=modify_incar_params)
                if modify_kpoints_params != {}:
                    from dfttk.utils import add_modify_kpoints_by_FWname
                    add_modify_kpoints_by_FWname(
                        wfs, modify_kpoints_params=modify_kpoints_params)
                wfs = Customizing_Workflows(wfs,
                                            powerups_options=powerups_options)
                lpad.add_wf(wfs)
        else:  # failure to meet the tolerance
            if len(volumes) == 0:  # self.error == 1e10: bad initial running set
                pass_result_error()
            else:  # fitting fails
                tol_error()

        import json
        with open('PreStatic_check_summary.json', 'w') as fp:
            json.dump(EVcheck_result, fp)
Example #10
    def run_task(self, fw_spec):
        ''' 
        run_num: maximum number of appended VASP runs; this limit avoids endless reruns caused by bad settings;
            for internal use only;

        Important args:
        tolerance: acceptable value for the average RMS; >= 0.005 recommended;
        threshold: if the total number of points exceeds this value, the set is reduced; < 16 recommended, otherwise runs take much longer;
        del_limited: maximum deletion ratio for large result sets;
        vol_spacing: the maximum ratio step between two volumes; extra points are inserted where the step is larger;
        '''
        max_run = 10
        deformations = self.get('deformations') or []
        db_file = self['db_file']
        tag = self['tag']
        vasp_cmd = self['vasp_cmd']
        metadata = self['metadata']
        relax_path = self['relax_path'] or ''
        run_num = self.get('run_num') or 0
        tolerance = self.get('tolerance') or 0.005
        threshold = self.get('threshold') or 14
        del_limited = self.get('del_limited') or 0.3
        vol_spacing = self.get('vol_spacing') or 0.03
        t_min = self.get('t_min') or 5 
        t_max = self.get('t_max') or 2000
        t_step = self.get('t_step') or 5
        phonon = self.get('phonon') or False
        phonon_supercell_matrix = self.get('phonon_supercell_matrix') or None
        verbose = self.get('verbose') or False
        modify_incar_params = self.get('modify_incar_params') or {}
        modify_kpoints_params = self.get('modify_kpoints_params') or {}
        Pos_Shape_relax = self.get('Pos_Shape_relax') or False
        symmetry_tolerance = self.get('symmetry_tolerance') or None
        run_num += 1
        
        relax_path, Pos_Shape_relax = check_relax_path(relax_path, db_file, tag, Pos_Shape_relax)
        if relax_path == '':
            print('''
#######################################################################
#                                                                     #
#       Cannot find relax path for static calculations, exit!         #
#               You can modify the tag and run again!                 #
#                                                                     #
#######################################################################
                ''')
            return
        
        from pymatgen.io.vasp.inputs import Poscar
        poscar = Poscar.from_file(relax_path + '/CONTCAR')
        structure = poscar.structure
        
        if phonon:
            if not consistent_check_db(db_file, tag):
                print('Please check DB, DFTTK running ended!')
                return

        volumes, energies, dos_objs = self.get_orig_EV(db_file, tag)
        vol_adds = self.check_deformations_in_volumes(deformations, volumes, structure.volume)
        if (len(vol_adds)) == 0:
            self.check_points(db_file, metadata, tolerance, threshold, del_limited, volumes, energies, verbose)
        else:
            self.correct = True
            self.error = 1e10
        
        EVcheck_result = {}
        EVcheck_result['append_run_num'] = run_num
        EVcheck_result['correct'] = self.correct
        EVcheck_result['volumes'] = volumes
        EVcheck_result['energies'] = energies
        EVcheck_result['tolerance'] = tolerance
        EVcheck_result['threshold'] = threshold
        EVcheck_result['vol_spacing'] = vol_spacing
        EVcheck_result['error'] = self.error
        EVcheck_result['metadata'] = metadata

        if self.correct:
            vol_orig = structure.volume
            if (len(vol_adds)) == 0:
                volume, energy, dos_obj = self.gen_volenergdos(self.points, volumes, energies, dos_objs)
                vol_adds = self.check_vol_coverage(volume, vol_spacing, vol_orig, run_num, 
                                                   energy, structure, dos_obj, phonon, 
                                                   db_file, tag, t_min, t_max, t_step,
                                                   EVcheck_result)   # Normalized to 1
                EVcheck_result['sellected'] = volume
                EVcheck_result['append'] = (vol_adds * vol_orig).tolist()
                # Marked as adopted in db
                mark_adopted(tag, db_file, volume)
            lpad = LaunchPad.auto_load()
            fws = []
            if len(vol_adds) > 0:      # VASP calculations need to be appended
                if run_num < max_run:
                    # Do VASP and check again
                    print('Appending the volumes of : %s to calculate in VASP!' %(vol_adds * vol_orig).tolist())
                    calcs = []
                    vis_relax = RelaxSet(structure)
                    vis_static = StaticSet(structure)
                    isif = 5 if 'infdet' in relax_path else 4
                    for vol_add in vol_adds:
                        if Pos_Shape_relax:
                            ps_relax_fw = OptimizeFW(structure, scale_lattice=vol_add, symmetry_tolerance=None, modify_incar = {'ISIF': isif},
                                                     job_type='normal', name='Pos_Shape_%.3f-relax' %(vol_add * vol_orig), prev_calc_loc=relax_path, 
                                                     vasp_input_set=vis_relax, vasp_cmd=vasp_cmd, db_file=db_file, metadata=metadata, Pos_Shape_relax = True,
                                                     modify_incar_params=modify_incar_params, modify_kpoints_params = modify_kpoints_params,
                                                     parents=None)
                            calcs.append(ps_relax_fw)
                            fws.append(ps_relax_fw)
                            static = StaticFW(structure, name = 'structure_%.3f-static' %(vol_add * vol_orig), vasp_input_set=vis_static, vasp_cmd=vasp_cmd, 
                                              db_file=db_file, metadata=metadata, prev_calc_loc=True, parents=ps_relax_fw)
                        else:
                            static = StaticFW(structure, scale_lattice=vol_add, name = 'structure_%.3f-static' %(vol_add * vol_orig), vasp_input_set=vis_static, vasp_cmd=vasp_cmd, 
                                              db_file=db_file, metadata=metadata, prev_calc_loc=relax_path, parents=None)
                        fws.append(static)
                        calcs.append(static)
                        if phonon:
                            visphonon = ForceConstantsSet(structure)
                            phonon_fw = PhononFW(structure, phonon_supercell_matrix, t_min=t_min, t_max=t_max, t_step=t_step,
                                     name='structure_%.3f-phonon' %(vol_add * vol_orig), vasp_input_set=visphonon,
                                     vasp_cmd=vasp_cmd, db_file=db_file, metadata=metadata,
                                     prev_calc_loc=True, parents=static)
                            fws.append(phonon_fw)
                            calcs.append(phonon_fw)
                    check_result = Firework(EVcheck_QHA(db_file = db_file, tag = tag, relax_path = relax_path, tolerance = tolerance, 
                                                        threshold = threshold, vol_spacing = vol_spacing, vasp_cmd = vasp_cmd, run_num = run_num,
                                                        metadata = metadata, t_min = t_min, t_max = t_max, t_step = t_step, phonon = phonon,
                                                        phonon_supercell_matrix = phonon_supercell_matrix, symmetry_tolerance = symmetry_tolerance,
                                                        modify_incar_params = modify_incar_params, verbose = verbose, Pos_Shape_relax = Pos_Shape_relax,
                                                        modify_kpoints_params = modify_kpoints_params), 
                                            parents = calcs, name='%s-EVcheck_QHA' %structure.composition.reduced_formula)
                    fws.append(check_result)
                    strname = "{}:{}".format(structure.composition.reduced_formula, 'EV_QHA_Append')
                    wfs = Workflow(fws, name = strname, metadata=metadata)
                    if modify_incar_params != {}:
                        from dfttk.utils import add_modify_incar_by_FWname
                        add_modify_incar_by_FWname(wfs, modify_incar_params = modify_incar_params)
                    if modify_kpoints_params != {}:
                        from dfttk.utils import add_modify_kpoints_by_FWname
                        add_modify_kpoints_by_FWname(wfs, modify_kpoints_params = modify_kpoints_params)
                    lpad.add_wf(wfs)
                else:
                    print('''

#######################################################################
#                                                                     #
#            Too many appended VASP running times, abort!             #
#                      Please check VASP setting!                     #
#                                                                     #
#######################################################################

                         ''')
            else:  # No more VASP calculations needed; QHA can run
                print('Success in Volumes-Energies checking, enter QHA ...')
                # Debye
                debye_fw = Firework(QHAAnalysis(phonon=False, t_min=t_min, t_max=t_max, t_step=t_step, db_file=db_file, tag=tag, metadata=metadata), 
                                    name="{}-qha_analysis-Debye".format(structure.composition.reduced_formula))
                fws.append(debye_fw)
                if phonon:
                    phonon_supercell_matrix = self.get('phonon_supercell_matrix')
                    # do a Debye run before the phonon, so they can be done in stages.
                    phonon_fw = Firework(QHAAnalysis(phonon=True, t_min=t_min, t_max=t_max, t_step=t_step, db_file=db_file, tag=tag, 
                                                     metadata=metadata), parents=debye_fw, name="{}-qha_analysis-phonon".format(structure.composition.reduced_formula))
                    fws.append(phonon_fw)
                strname = "{}:{}".format(structure.composition.reduced_formula, 'QHA')
                wfs = Workflow(fws, name = strname, metadata=metadata)
                lpad.add_wf(wfs)
        else:   # failure to meet the tolerance
            if len(volumes) == 0: #self.error == 1e10:   # Bad initial running set
                print('''

#######################################################################
#                                                                     #
#  "passinitrun = True" could not set while initial results absent.   #
#                                                                     #
#######################################################################

                      
                      ''')
            else:                      # fitting fails
                print('''

#######################################################################
#                                                                     #
#           Can not achieve the tolerance requirement, abort!         #
#                                                                     #
#######################################################################

                      ''')
        import json
        with open('E-V check_summary.json', 'w') as fp:
            json.dump(EVcheck_result, fp)
Example #11
                "spin_multiplicity": 1
            },
            fw_id=2)
        fw3 = Firework(
            [task_freq_esp],
            name='Gaussian Frequency and ESP',
            spec={
                "mol_name": os.path.splitext(file_name)[0],
                "charge": 0,
                "spin_multiplicity": 1
            },
            fw_id=3)
        fw4 = Firework([task_lammps_inp],
                       name='Run Lammps',
                       spec={"molecule": mol_with_site_prop},
                       fw_id=4)
        fw5 = Firework([task_lammps_log_dbinsert],
                       name='Lammps Log Parsing',
                       fw_id=5)
        fw6 = Firework([task_lammps_prop_dbinsert],
                       name='Lammps Properties Parser',
                       fw_id=6)

        depen = {1: 2, 2: 3, 3: 4, 4: [5, 6]}
        wf = Workflow([fw1, fw2, fw3, fw4, fw5, fw6],
                      name="LAMMPS",
                      links_dict=depen)

        lp = LaunchPad.auto_load()
        lp.add_wf(wf)
Example #12
    def __init__(self, *args, **kwargs):
        super(OptTask, self).__init__(*args, **kwargs)

        # Configuration attrs
        lp = self.get("launchpad", LaunchPad.auto_load())
        if isinstance(lp, LaunchPad):
            lp = lp.to_dict()
        self.lpad = LaunchPad.from_dict(lp)
        self.opt_label = self.get("opt_label", "opt_default")
        self.c = getattr(self.lpad.db, self.opt_label)
        self.config = self.c.find_one({"doctype": "config"})
        if self.config is None:
            raise NotConfiguredError(
                "Please use MissionControl().configure to "
                "configure the optimization database "
                "({} - {}) before running OptTask."
                "".format(self.lpad.db, self.opt_label))
        self.wf_creator = deserialize(self.config["wf_creator"])
        self.x_dims = self.config["dimensions"]
        self._xdim_types = self.config["dim_types"]
        self.is_discrete_all = self.config["is_discrete_all"]
        self.is_discrete_any = self.config["is_discrete_any"]
        self.wf_creator_args = self.config["wf_creator_args"] or []
        self.wf_creator_kwargs = self.config["wf_creator_kwargs"] or {}
        self.predictor = self.config["predictor"]
        self.predictor_args = self.config["predictor_args"] or []
        self.predictor_kwargs = self.config["predictor_kwargs"] or {}
        self.maximize = self.config["maximize"]
        self.n_search_pts = self.config["n_search_pts"]
        self.n_train_pts = self.config["n_train_pts"]
        self.n_bootstraps = self.config["n_bootstraps"]
        self.acq = self.config["acq"]
        self.space_file = self.config["space_file"]
        self.onehot_categorical = self.config["onehot_categorical"]
        self.duplicate_check = self.config["duplicate_check"]
        self.get_z = self.config["get_z"]
        if self.get_z:
            self.get_z = deserialize(self.config["get_z"])
        else:
            self.get_z = lambda *ars, **kws: []
        self.get_z_args = self.config["get_z_args"] or []
        self.get_z_kwargs = self.config["get_z_kwargs"] or {}
        self.z_file = self.config["z_file"]
        self.enforce_sequential = self.config["enforce_sequential"]
        self.tolerances = self.config["tolerances"]
        self.batch_size = self.config["batch_size"]
        self.timeout = self.config["timeout"]

        # Declared attrs
        self.n_objs = None
        self.builtin_predictors = {p.__name__: p for p in BUILTIN_PREDICTORS}
        self._n_cats = 0
        self._encoding_info = []

        # Query formats
        self._completed = {
            "x": {
                "$exists": 1
            },
            "y": {
                "$exists": 1,
                "$ne": "reserved"
            },
            "z": {
                "$exists": 1
            },
        }
        self._manager = {"lock": {"$exists": 1}, "queue": {"$exists": 1}}
Example #13
"""

# Parameters:
box_scale = 8.9 # edge length of MD box in Angstroms, can also be a numpy array that scales the lattice
packmol_path = "~/packmol/packmol/packmol" # Revise as appropriate
structure = {'H2O':20} # "structure" in this context can be a dict of number of atoms or molecules.
temperature = 320

# Note one can use a pymatgen Structure object also
# E.g. p = Poscar.from_file("POSCAR")
#      structure = p.structure

copy_calcs = True # MD runs can be backed up in a desired location
calc_home = '~/test_H2O_wflows' # This is the location to copy the calculations if copy_calcs=True

# Since we specified a molecule, we must also give the path to xyz
# file of a single sample molecule.
xyz_paths = ['H2O.xyz']
name = 'H2O_df_'+str(temperature)


from mpmorph.workflow.workflows import get_wf_density
from fireworks import LaunchPad

amorphous_maker_params = {'box_scale':box_scale, 'packmol_path':packmol_path, 'xyz_paths': xyz_paths, 'tol': 2.0}

wf = get_wf_density(structure, temperature=temperature, pressure_threshold=0.5, nsteps=1000, wall_time=19200, max_rescales=5,
                    amorphous_maker_params=amorphous_maker_params, copy_calcs=copy_calcs, calc_home=calc_home, name=name)

lp = LaunchPad.auto_load()
lp.add_wf(wf)
Example #14
def main():
    import argparse
    parser = argparse.ArgumentParser(
        description="Run A QChem Job for a QChem Input File")
    parser.add_argument(
        "-d",
        "--directory",
        dest="directory",
        type=str,
        required=True,
        help=
        "the directory contains all the QChem jobs to be pretended to run again"
    )
    parser.add_argument("-p",
                        "--priority",
                        dest="priority",
                        type=int,
                        default=100,
                        help="the FireWorks priority")
    parser.add_argument("-b",
                        "--batch_size",
                        dest="batch_size",
                        type=int,
                        default=100,
                        help="the number of FireWorks in a Workflow")
    options = parser.parse_args()

    fw_priority = options.priority
    batch_size = options.batch_size

    lp = LaunchPad.auto_load()

    src_dir = os.path.abspath(options.directory)
    src_dir_sub_dirs = glob.glob(os.path.join(src_dir, "*"))
    num_dirs = len(src_dir_sub_dirs)
    current_fwid = 1
    links_dict = dict()
    fws_all = []
    num_fw_in_current_batch = 0
    batch_num = 1
    for i, sd in enumerate(src_dir_sub_dirs):
        if not os.path.isdir(sd):
            continue
        fw_json_filename = os.path.join(sd, "FW.json")
        if not (os.path.exists(fw_json_filename)
                or os.path.exists(fw_json_filename + ".gz")):
            continue
        with zopen(zpath(fw_json_filename), 'rt') as f:
            fw_dict = json.load(f)
        print("{percent:4.2%} completed, processing directory {d:s}, "
              "molecule name {molname:s}," \
              " mission {mission:s}".format(percent=i / float(num_dirs), d=sd,
                                            molname=
                                            fw_dict['spec']['user_tags'][
                                                'molname'],
                                            mission=
                                            fw_dict['spec']['user_tags'][
                                                'mission']))

        molname = fw_dict['spec']['user_tags']['molname']
        egsnl_tasks = [AddEGSNLTask()]
        if 'mol' in fw_dict:
            mol = Molecule.from_dict(fw_dict['spec']['mol'])
        else:
            mol = Molecule.from_dict(
                fw_dict['spec']['qcinp']['jobs'][0]['molecule'])
        snl = StructureNL(mol, "Xiaohui Qu <*****@*****.**>",
                          "Electrolyte Genome")
        egsnl_task_spec = {
            'task_type': 'Add to SNL database',
            'snl': snl.as_dict(),
            '_category': 'Parse Previous QChem Job',
            '_priority': fw_priority
        }
        snl_fw_id = current_fwid
        current_fwid += 1
        fws_all.append(
            Firework(
                egsnl_tasks,
                egsnl_task_spec,
                name=get_slug(molname +
                              ' -- Add to SNL database For fake QChem Task'),
                fw_id=snl_fw_id))

        fake_qchem_tasks = [FakeRunQChemTask()]
        src_qchem_dir = sd
        fake_qchem_spec = {
            '_priority': fw_priority * 2,
            'src_qchem_dir': src_qchem_dir,
            '_category': 'Parse Previous QChem Job',
            'run_tags': fw_dict['spec']['run_tags'],
            'implicit_solvent': fw_dict['spec']['implicit_solvent'],
            'task_type': fw_dict['spec']['task_type'],
            'charge': fw_dict['spec']['charge'],
            'spin_multiplicity': fw_dict['spec']['spin_multiplicity'],
            'num_atoms': fw_dict['spec']['num_atoms'],
            'user_tags': fw_dict['spec']['user_tags'],
            'mol': mol.as_dict(),
            'inchi': fw_dict['spec']['inchi'],
            '_dupefinder': fw_dict['spec']['_dupefinder'],
            'qcinp': fw_dict['spec']['qcinp'],
            'qm_method': fw_dict['spec']['qm_method'],
            'inchi_root': fw_dict['spec']['inchi_root']
        }
        for k in ['mixed_basis', 'mixed_aux_basis']:
            if k in fw_dict['spec']:
                fake_qchem_spec[k] = fw_dict['spec'][k]
        fake_qchem_fw_id = current_fwid
        current_fwid += 1
        fws_all.append(
            Firework(fake_qchem_tasks,
                     fake_qchem_spec,
                     name='Fake' + fw_dict['name'],
                     fw_id=fake_qchem_fw_id))
        links_dict[snl_fw_id] = fake_qchem_fw_id

        num_fw_in_current_batch += 1
        if num_fw_in_current_batch >= batch_size:
            wf = Workflow(fws_all, links_dict,
                          "Read Previous QChem Jobs Id-{}".format(batch_num))
            lp.add_wf(wf)
            batch_num += 1
            links_dict = dict()
            fws_all = []
            num_fw_in_current_batch = 0

    if num_fw_in_current_batch > 0:
        wf = Workflow(fws_all, links_dict, "Read Previous QChem Jobs")
        lp.add_wf(wf)
Example #15
import numpy as np
from pymatgen import Structure
from fireworks import LaunchPad, Workflow
from atomate.vasp.powerups import add_modify_incar, add_tags
from atomate.vasp.workflows.base.ferroelectric import get_wf_ferroelectric

comp = 'LaSnO2N'
np_struct = Structure.from_file(comp + '_hex_30.vasp')
p_struct = Structure.from_file(comp + '_dist.vasp')
wf = get_wf_ferroelectric(p_struct,
                          np_struct,
                          vasp_cmd='ibrun tacc_affinity vasp_std',
                          add_analysis_task=True,
                          tags=[comp],
                          db_file='/scratch/04391/tg836903/ilmenites/db.json')
wf = add_modify_incar(wf,
                      modify_incar_params={
                          'incar_update': {
                              'Algo': 'Normal',
                              'EDIFF': 0.00001,
                              'NEDOS': 3000
                          }
                      })

lpad = LaunchPad.auto_load()  # loads this based on the FireWorks configuration
lpad.add_wf(wf)
Example #16
def run(args):
    """
    Run dfttk
    Currently, only get_wf_gibbs is supported.

    Parameters
        STR_FOLDER = args.STRUCTURE_FOLDER
            folder/file containing structures
        MATCH_PATTERN = args.MATCH_PATTERN
            Match patterns for structure file, e.g. *POSCAR
        RECURSIVE = args.RECURSIVE
            recursive or not
        WORKFLOW = args.WORKFLOW
            workflow, current only get_wf_gibbs
        LAUNCH = args.LAUNCH
            Launch to lpad or not
        MAX_JOB = args.MAX_JOB
            Max job to submit
        SETTINGS = args.SETTINGS
            Settings file
        WRITE_OUT_WF = args.WRITE_OUT_WF
            Write out wf file or not
    """
    STR_FOLDER = args.STRUCTURE_FOLDER  # folder/file containing structures
    MATCH_PATTERN = args.MATCH_PATTERN  # Match patterns for structure file, e.g. *POSCAR
    RECURSIVE = args.RECURSIVE  # recursive or not
    WORKFLOW = args.WORKFLOW  # workflow, current only get_wf_gibbs
    PHONON = args.PHONON  # run phonon
    LAUNCH = args.LAUNCH  # Launch to lpad or not
    MAX_JOB = args.MAX_JOB  # Max job to submit
    SETTINGS = args.SETTINGS  # Settings file
    WRITE_OUT_WF = args.WRITE_OUT_WF  # Write out wf file or not
    TAG = args.TAG  # Metadata from the command line
    APPEND = args.APPEND  # Append calculations, e.g. appending volumes or phonon or born
    db_file = args.db_file  # user-supplied db_file, such as db.json

    if not db_file:
        if os.path.exists('db.json'):
            db_file = 'db.json'
    elif not os.path.exists(db_file):
        db_file = None

    ## Initial wfs and metadatas
    wfs = []
    metadatas = {}

    if APPEND:
        if TAG:
            metadatas = {
                os.path.join(os.path.abspath('./'), 'POSCAR'): {
                    'tag': TAG
                }
            }
        elif os.path.exists('METADATAS.yaml'):
            metadatas = loadfn('METADATAS.yaml')
        else:
            raise ValueError(
                'For APPEND mode, please provide a TAG with -tag or provide a METADATAS.yaml file'
            )
        for keyi in metadatas:
            (STR_PATH, STR_FILENAME_WITH_EXT) = os.path.split(keyi)
            (STR_FILENAME, STR_EXT) = os.path.splitext(STR_FILENAME_WITH_EXT)
            user_settings = get_user_settings(STR_FILENAME_WITH_EXT,
                                              STR_PATH=STR_PATH,
                                              NEW_SETTING=SETTINGS)
            metadata = user_settings.get('metadata', {})
            metadata.update(metadatas[keyi])
            user_settings.update({'metadata': metadata})
            structure = get_eq_structure_by_metadata(metadata=metadata,
                                                     db_file=db_file)
            if structure is None:
                raise FileNotFoundError(
                    'There is no static results under current metadata tag({})'
                    .format(metadata['tag']))
            if PHONON:
                user_settings.update({'phonon': True})
            phonon_supercell_matrix = user_settings.get(
                'phonon_supercell_matrix', None)
            if phonon_supercell_matrix is None:
                user_settings.update({"phonon_supercell_matrix": "atoms"})

            wf = get_wf_single(structure,
                               WORKFLOW=WORKFLOW,
                               settings=user_settings,
                               db_file=db_file)

            wf = Customizing_Workflows(wf,
                                       powerups_options=user_settings.get(
                                           'powerups', None))
            if isinstance(wf, list):
                wfs = wfs + wf
            else:
                wfs.append(wf)

            if WRITE_OUT_WF:
                dfttk_wf_filename = os.path.join(
                    STR_PATH, "dfttk_wf-" + STR_FILENAME_WITH_EXT + ".yaml")
                dumpfn(wf, dfttk_wf_filename)
    else:
        if os.path.exists('METADATAS.yaml'):
            metadatas = loadfn('METADATAS.yaml')
        ## Get the file names of files
        STR_FILES = get_structure_file(STR_FOLDER=STR_FOLDER,
                                       RECURSIVE=RECURSIVE,
                                       MATCH_PATTERN=MATCH_PATTERN)
        ## generate the wf
        for STR_FILE in STR_FILES:
            (STR_PATH, STR_FILENAME_WITH_EXT) = os.path.split(STR_FILE)
            (STR_FILENAME, STR_EXT) = os.path.splitext(STR_FILENAME_WITH_EXT)
            str_filename = STR_FILENAME.lower()
            if (str_filename.endswith("-" + SETTINGS.lower())
                    or str_filename.startswith(SETTINGS.lower() + "-")
                    or (str_filename == SETTINGS.lower())):
                print(
                    STR_FILE +
                    " is a setting file, not structure file, and skipped when reading the structure."
                )
            elif STR_FILE == os.path.abspath(__file__):
                # This is the current file
                pass
            else:
                flag_run = False
                try:
                    structure = Structure.from_file(STR_FILE)
                    flag_run = True
                except Exception as e:
                    warnings.warn("The name or the contant of " + STR_FILE + " is not supported by dfttk, and skipped. " + \
                        "Ref. https://pymatgen.org/pymatgen.core.structure.html#pymatgen.core.structure.IStructure.from_file")

                if flag_run:
                    user_settings = get_user_settings(STR_FILENAME_WITH_EXT,
                                                      STR_PATH=STR_PATH,
                                                      NEW_SETTING=SETTINGS)
                    metadatai = metadatas.get(STR_FILE, None)
                    if metadatai:
                        user_settings.update({'metadata': metadatai})
                    if PHONON:
                        user_settings.update({'phonon': True})
                    phonon_supercell_matrix = user_settings.get(
                        'phonon_supercell_matrix', None)
                    if phonon_supercell_matrix is None:
                        user_settings.update(
                            {"phonon_supercell_matrix": "atoms"})

                    wf = get_wf_single(structure,
                                       WORKFLOW=WORKFLOW,
                                       settings=user_settings)
                    wf = Customizing_Workflows(
                        wf,
                        powerups_options=user_settings.get('powerups', None))
                    metadatas[STR_FILE] = wf.as_dict()["metadata"]
                    wfs.append(wf)

                    if WRITE_OUT_WF:
                        dfttk_wf_filename = os.path.join(
                            STR_PATH,
                            "dfttk_wf-" + STR_FILENAME_WITH_EXT + ".yaml")
                        dumpfn(wf.to_dict(), dfttk_wf_filename)

        # Write out the metadata for POST and continuation purposes
        dumpfn(metadatas, "METADATAS.yaml")
    """
    _fws = []
    for wflow in wfs:
        revised_wflow = Customizing_Workflows(wflow,user_settings={})
        _fws.append(revised_wflow)
    fws = _fws
    """

    if LAUNCH:
        from fireworks import LaunchPad
        lpad = LaunchPad.auto_load()

        for wflow in wfs:
            lpad.add_wf(wflow)

        if MAX_JOB:
            # Not False or Empty
            if MAX_JOB == 1:
                os.system("qlaunch singleshot")
            else:
                os.system("qlaunch rapidfire -m " + str(MAX_JOB))
Example #17
    [cmsa.matches_ordering(s) for s in matched_structures].index(True)

    # Enforce all magmom magnitudes to match the gs
    for s in matched_structures:
        ms = s.site_properties["magmom"]
        magmoms = [np.sign(m1) * m2 for m1, m2 in zip(ms, gs_magmoms)]
        s.add_site_property("magmom", magmoms)

    return matched_structures, input_index, ordered_structure_origins


if __name__ == "__main__":

    # for trying workflows

    from fireworks import LaunchPad

    latt = Lattice.cubic(4.17)
    species = ["Ni", "O"]
    coords = [[0.00000, 0.00000, 0.00000], [0.50000, 0.50000, 0.50000]]
    NiO = Structure.from_spacegroup(225, latt, species, coords)

    wf_deformation = get_wf_magnetic_deformation(NiO)

    wf_orderings = MagneticOrderingsWF(NiO).get_wf()

    lpad = LaunchPad.auto_load()
    lpad.add_wf(wf_orderings)
    lpad.add_wf(wf_deformation)
Example #18
    def run_task(self, fw_spec):
        '''
        run_num: maximum number of appended VASP runs; this limit avoids endless reruns caused by bad settings;
            for internal use only;

        Important args:
        eos_tolerance: acceptable value for the average RMS; >= 0.005 recommended;
        threshold: if the total number of points exceeds this value, the set is reduced; < 16 recommended, otherwise runs take much longer;
        del_limited: maximum deletion ratio for large result sets;
        vol_spacing: the maximum ratio step between two volumes; extra points are inserted where the step is larger;
        '''
        # Get the parameters from the object
        max_run = 10
        db_file = env_chk(self.get('db_file', DB_FILE),
                          fw_spec)  # always a concrete db_file
        vasp_cmd = ">>vasp_cmd<<"  # could be changed so the user can provide their own
        deformations = self.get('deformations', [])
        run_num = self.get('run_num', 0)
        eos_tolerance = self.get('eos_tolerance', 0.005)
        threshold = self.get('threshold', 14)
        del_limited = self.get('del_limited', 0.3)
        vol_spacing = self.get('vol_spacing', 0.05)
        t_min = self.get('t_min', 5)
        t_max = self.get('t_max', 2000)
        t_step = self.get('t_step', 5)
        phonon = self.get('phonon', False)
        force_phonon = self.get('force_phonon', False)
        phonon_supercell_matrix = self.get('phonon_supercell_matrix', None)
        verbose = self.get('verbose', False)
        modify_kpoints_params = self.get('modify_kpoints_params', {})
        site_properties = self.get('site_properties', None)

        modify_incar_params = self.get('modify_incar_params', {})
        powerups_options = modify_incar_params.get('powerups', None)

        override_default_vasp_params = self.get('override_default_vasp_params',
                                                {})
        user_incar_settings = override_default_vasp_params.get(
            'user_incar_settings', {})
        powerups_options = user_incar_settings.get('powerups',
                                                   powerups_options)

        override_symmetry_tolerances = self.get('override_symmetry_tolerances',
                                                {})
        store_volumetric_data = self.get('store_volumetric_data', False)

        stable_tor = self.get('stable_tor', 0.01)
        test = self.get('test', False)

        relax_structure = self.get('structure') or fw_spec.get(
            'structure', None)
        relax_scheme = self.get('relax_scheme') or fw_spec.get(
            'relax_scheme', [2])
        relax_phonon = fw_spec.get('relax_phonon', False)

        # Run phonon only if phonon=True was set and the ISIF=4 relaxation passed (relax_phonon in the spec)
        if not force_phonon:
            phonon = phonon and relax_phonon

        metadata = self.get('metadata', {})
        tag = self.get('tag', metadata.get('tag', None))
        if tag is None:
            tag = str(uuid4())
            metadata['tag'] = tag

        common_kwargs = {
            'vasp_cmd': vasp_cmd,
            'db_file': self.get('db_file', DB_FILE),
            "metadata": metadata,
            "tag": tag,
            'override_default_vasp_params': override_default_vasp_params,
        }
        vasp_kwargs = {
            'modify_incar_params': modify_incar_params,
            'modify_kpoints_params': modify_kpoints_params
        }
        t_kwargs = {'t_min': t_min, 't_max': t_max, 't_step': t_step}
        eos_kwargs = {
            'vol_spacing': vol_spacing,
            'eos_tolerance': eos_tolerance,
            'threshold': threshold
        }

        run_num += 1

        # Some initial checks
        # TODO: add phonon after RobustOptimizeFW
        if phonon:
            # Check that the phonon and optimization records in the DB are consistent
            if not consistent_check_db(db_file, tag):
                print('Inconsistent phonon/optimization records in the DB, please check; DFTTK run ended!')
                return

        if relax_structure is not None:
            structure = deepcopy(relax_structure)
        else:
            raise ValueError(
                'No structure found in the spec; please provide a structure as input')

        if site_properties:
            for pkey in site_properties:
                structure.add_site_property(pkey, site_properties[pkey])
        # get original EV curve
        volumes, energies, dos_objs = self.get_orig_EV(db_file, tag)
        vol_adds = check_deformations_in_volumes(deformations, volumes,
                                                 structure.volume)
        if (len(vol_adds)) == 0:
            self.check_points(db_file, metadata, eos_tolerance, threshold,
                              del_limited, volumes, energies, verbose)
        else:
            self.correct = True
            self.error = 1e10

        EVcheck_result = init_evcheck_result(append_run_num=run_num,
                                             correct=self.correct,
                                             volumes=volumes,
                                             energies=energies,
                                             eos_tolerance=eos_tolerance,
                                             threshold=threshold,
                                             vol_spacing=vol_spacing,
                                             error=self.error,
                                             metadata=metadata)

        if self.correct:
            vol_orig = structure.volume
            if (len(vol_adds)) == 0:
                volume, energy, dos_obj = gen_volenergdos(
                    self.points, volumes, energies, dos_objs)
                vol_adds = self.check_vol_coverage(
                    volume, vol_spacing, vol_orig, run_num, energy, structure,
                    dos_obj, phonon, db_file, tag, t_min, t_max, t_step,
                    EVcheck_result)  # Normalized to 1
                EVcheck_result['selected'] = volume
                EVcheck_result['append'] = (vol_adds).tolist()
                # Marked as adopted in db
                mark_adopted(tag, db_file, volume, phonon=phonon)
            lpad = LaunchPad.auto_load()
            fws = []
            if len(vol_adds) > 0:  # VASP calculations need to append
                if run_num < max_run:
                    # Do VASP and check again
                    print('Appending volumes %s for additional VASP calculations!'
                          % vol_adds.tolist())
                    calcs = []
                    #vis_relax = RelaxSet(structure)
                    #vis_static = StaticSet(structure)
                    #isif2 = 5 if 'infdet' in relax_path else 4
                    for vol_add in vol_adds:
                        struct = deepcopy(structure)
                        struct.scale_lattice(structure.volume * vol_add)

                        relax_parents_fw = None
                        for isif_i in relax_scheme:
                            #record_path=record_path
                            relax_fw = OptimizeFW(
                                struct,
                                isif=isif_i,
                                store_volumetric_data=store_volumetric_data,
                                name="relax_Vol{:.3f}".format(vol_add),
                                vasp_input_set=None,
                                job_type="normal",
                                override_symmetry_tolerances=
                                override_symmetry_tolerances,
                                prev_calc_loc=True,
                                parents=relax_parents_fw,
                                db_insert=False,
                                force_gamma=True,
                                modify_incar={},
                                **vasp_kwargs,
                                **common_kwargs)
                            relax_parents_fw = deepcopy(relax_fw)
                            fws.append(relax_fw)
                            calcs.append(relax_fw)

                        static_fw = StaticFW(
                            struct,
                            isif=relax_scheme[-1],
                            name='static_Vol{:.3f}'.format(vol_add),
                            vasp_input_set=None,
                            prev_calc_loc=True,
                            parents=relax_parents_fw,
                            store_volumetric_data=store_volumetric_data,
                            **common_kwargs)
                        fws.append(static_fw)
                        calcs.append(static_fw)

                        if phonon:
                            #visphonon = ForceConstantsSet(struct)
                            phonon_fw = PhononFW(
                                struct,
                                phonon_supercell_matrix,
                                vasp_input_set=None,
                                stable_tor=stable_tor,
                                name='structure_{:.3f}-phonon'.format(vol_add),
                                prev_calc_loc=True,
                                parents=static_fw,
                                **t_kwargs,
                                **common_kwargs)
                            fws.append(phonon_fw)
                            calcs.append(phonon_fw)
                    check_result = Firework(
                        EVcheck_QHA(
                            structure=relax_structure,
                            relax_scheme=relax_scheme,
                            store_volumetric_data=store_volumetric_data,
                            run_num=run_num,
                            verbose=verbose,
                            site_properties=site_properties,
                            stable_tor=stable_tor,
                            phonon=phonon,
                            phonon_supercell_matrix=phonon_supercell_matrix,
                            force_phonon=force_phonon,
                            **eos_kwargs,
                            **vasp_kwargs,
                            **t_kwargs,
                            **common_kwargs),
                        parents=calcs,
                        name='{}-EVcheck_QHA'.format(
                            structure.composition.reduced_formula))
                    fws.append(check_result)
                    strname = "{}:{}".format(
                        structure.composition.reduced_formula, 'EV_QHA_Append')
                    wfs = Workflow(fws, name=strname, metadata=metadata)

                    if modify_incar_params != {}:
                        from dfttk.utils import add_modify_incar_by_FWname
                        add_modify_incar_by_FWname(
                            wfs, modify_incar_params=modify_incar_params)
                    if modify_kpoints_params != {}:
                        from dfttk.utils import add_modify_kpoints_by_FWname
                        add_modify_kpoints_by_FWname(
                            wfs, modify_kpoints_params=modify_kpoints_params)
                    wfs = Customizing_Workflows(
                        wfs, powerups_options=powerups_options)
                    if not test: lpad.add_wf(wfs)
                else:
                    too_many_run_error()
            else:  # No more VASP calculations are needed; QHA can be run
                print('Volume-energy check succeeded, entering QHA ...')
                debye_fw = Firework(QHAAnalysis(phonon=phonon,
                                                t_min=t_min,
                                                t_max=t_max,
                                                t_step=t_step,
                                                db_file=self.get(
                                                    'db_file', DB_FILE),
                                                tag=tag,
                                                metadata=metadata),
                                    name="{}-qha_analysis".format(
                                        structure.composition.reduced_formula))
                fws.append(debye_fw)
                '''
                # Debye
                debye_fw = Firework(QHAAnalysis(phonon=False, t_min=t_min, t_max=t_max, t_step=t_step, db_file=db_file, tag=tag, metadata=metadata),
                                    name="{}-qha_analysis-Debye".format(structure.composition.reduced_formula))
                fws.append(debye_fw)
                if phonon:
                    phonon_supercell_matrix = self.get('phonon_supercell_matrix')
                    # do a Debye run before the phonon, so they can be done in stages.
                    phonon_fw = Firework(QHAAnalysis(phonon=True, t_min=t_min, t_max=t_max, t_step=t_step, db_file=db_file, tag=tag,
                                                     metadata=metadata), parents=debye_fw, name="{}-qha_analysis-phonon".format(structure.composition.reduced_formula))
                    fws.append(phonon_fw)
                '''
                strname = "{}:{}".format(structure.composition.reduced_formula,
                                         'QHA')
                wfs = Workflow(fws, name=strname, metadata=metadata)
                wfs = Customizing_Workflows(wfs,
                                            powerups_options=powerups_options)
                if not test: lpad.add_wf(wfs)
        else:  # failed to meet the tolerance
            if len(volumes) == 0:  # self.error == 1e10: bad initial run set
                pass_result_error()
            else:  # the EOS fitting failed
                tol_error()
        # Write a summary of this EV check for later inspection
        import json
        with open('EV_check_summary.json', 'w') as fp:
            json.dump(EVcheck_result, fp, indent=4)
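
# Editorial sketch (not part of the original dfttk source): EVcheck_QHA is normally wrapped
# in a Firework and added to a Workflow, mirroring the `check_result` Firework built inside
# run_task above.  The keyword values below are the defaults documented in the run_task
# docstring; the NiO test structure is an assumption made for this sketch only.
def _evcheck_qha_sketch():
    from uuid import uuid4
    from fireworks import Firework, LaunchPad, Workflow
    from pymatgen.core import Lattice, Structure

    nio = Structure.from_spacegroup(
        225, Lattice.cubic(4.17), ["Ni", "O"],
        [[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]])
    check_fw = Firework(
        EVcheck_QHA(structure=nio,
                    eos_tolerance=0.005,  # acceptable average RMS of the EOS fit
                    threshold=14,         # max number of (volume, energy) points kept
                    vol_spacing=0.05,     # max volume-ratio step between neighboring points
                    metadata={"tag": str(uuid4())}),
        name="NiO-EVcheck_QHA-sketch")
    LaunchPad.auto_load().add_wf(Workflow([check_fw], name="NiO:EVcheck_QHA_sketch"))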
Exemplo n.º 19
0
        if scan:
            wf_name += " - SCAN"
        wf = Workflow(fws, name=wf_name)

        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

        tag = "magnetic_orderings group: >>{}<<".format(self.uuid)
        wf = add_tags(wf, [tag, ordered_structure_origins])

        return wf
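
# Editorial note (not in the original file): add_additional_fields_to_taskdocs and
# add_tags used above are workflow "powerups"; in atomate they are provided by
# atomate.vasp.powerups, so the enclosing module presumably imports something like:
#
#     from atomate.vasp.powerups import add_tags, add_additional_fields_to_taskdocs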


if __name__ == "__main__":

    # for trying workflows

    from fireworks import LaunchPad

    latt = Lattice.cubic(4.17)
    species = ["Ni", "O"]
    coords = [[0.00000, 0.00000, 0.00000], [0.50000, 0.50000, 0.50000]]
    NiO = Structure.from_spacegroup(225, latt, species, coords)

    wf_deformation = get_wf_magnetic_deformation(NiO)

    wf_orderings = MagneticOrderingsWF(NiO).get_wf()

    lpad = LaunchPad.auto_load()
    lpad.add_wf(wf_orderings)
    lpad.add_wf(wf_deformation)
Exemplo n.º 20
0
def run(args):
    """
    Run dfttk.
    Currently, only get_wf_gibbs is supported.

    Parameters
        STR_FOLDER = args.STRUCTURE_FOLDER
            folder/file containing the structures
        MATCH_PATTERN = args.MATCH_PATTERN
            match pattern for structure files, e.g. *POSCAR
        RECURSIVE = args.RECURSIVE
            search recursively or not
        WORKFLOW = args.WORKFLOW
            workflow to run; currently only get_wf_gibbs
        LAUNCH = args.LAUNCH
            launch to the LaunchPad or not
        MAX_JOB = args.MAX_JOB
            maximum number of jobs to submit
        SETTINGS = args.SETTINGS
            settings file
        WRITE_OUT_WF = args.WRITE_OUT_WF
            write out the workflow file or not

    (A usage sketch is appended at the end of this example.)
    """
    STR_FOLDER = args.STRUCTURE_FOLDER  # folder/file containing structures
    MATCH_PATTERN = args.MATCH_PATTERN  # Match patterns for structure file, e.g. *POSCAR
    RECURSIVE = args.RECURSIVE  # recursive or not
    WORKFLOW = args.WORKFLOW  # workflow, currently only get_wf_gibbs
    LAUNCH = args.LAUNCH  # Launch to lpad or not
    MAX_JOB = args.MAX_JOB  # Max job to submit
    SETTINGS = args.SETTINGS  # Settings file
    WRITE_OUT_WF = args.WRITE_OUT_WF  # Write out wf file or not

    ## Get the names of the structure files
    STR_FILES = get_structure_file(STR_FOLDER=STR_FOLDER,
                                   RECURSIVE=RECURSIVE,
                                   MATCH_PATTERN=MATCH_PATTERN)

    ## Initialize wfs and metadatas
    wfs = []
    metadatas = {}

    ## Generate the workflows
    for STR_FILE in STR_FILES:
        (STR_PATH, STR_FILENAME_WITH_EXT) = os.path.split(STR_FILE)
        (STR_FILENAME, STR_EXT) = os.path.splitext(STR_FILENAME_WITH_EXT)
        str_filename = STR_FILENAME.lower()
        if (str_filename.endswith("-" + SETTINGS.lower())
                or str_filename.startswith(SETTINGS.lower() + "-")
                or (str_filename == SETTINGS.lower())):
            print(
                STR_FILE +
                " is a settings file, not a structure file; it is skipped when reading structures."
            )
        elif STR_FILE == os.path.abspath(__file__):
            # This is the current script itself; skip it
            pass
        else:
            flag_run = False
            try:
                structure = Structure.from_file(STR_FILE)
                flag_run = True
            except Exception as e:
                warnings.warn("The name or the content of " + STR_FILE + " is not supported by dfttk and is skipped. " + \
                    "Ref. https://pymatgen.org/pymatgen.core.structure.html#pymatgen.core.structure.IStructure.from_file")

            if flag_run:
                user_settings = get_user_settings(STR_FILENAME,
                                                  STR_PATH=STR_PATH,
                                                  NEW_SETTING=SETTINGS)

                wf = get_wf_single(structure,
                                   WORKFLOW=WORKFLOW,
                                   settings=user_settings)

                metadatas[STR_FILE] = wf.as_dict()["metadata"]
                wfs.append(wf)

                if WRITE_OUT_WF:
                    dfttk_wf_filename = os.path.join(
                        STR_PATH, "dfttk_wf-" + STR_FILENAME + ".yaml")
                    wf.to_file(dfttk_wf_filename)

    # Write out the metadata for post-processing purposes
    dumpfn(metadatas, "METADATAS.yaml")

    if LAUNCH:
        from fireworks import LaunchPad
        lpad = LaunchPad.auto_load()

        for wflow in wfs:
            lpad.add_wf(wflow)
        if MAX_JOB:
            # Not False or Empty
            if MAX_JOB == 1:
                os.system("qlaunch singleshot")
            else:
                os.system("qlaunch rapidfire -m " + str(MAX_JOB))