Example #1
    def _match_material(self, taskdoc):
        """
        Returns the material_id that has the same structure as this task as
         determined by the structure matcher. Returns None if no match.

        Args:
            taskdoc (dict): a JSON-like task document

        Returns:
            (int) matching material_id or None
        """
        formula = taskdoc["formula_reduced_abc"]

        # the "parent_structure" option is used to intentionally combine multiple entries with the
        # same formula but slightly different structures, e.g. from an ordering scheme
        if "parent_structure" in taskdoc:
            t_struct = Structure.from_dict(taskdoc["parent_structure"]["structure"])
            q = {"formula_reduced_abc": formula,
                 "parent_structure.spacegroup.number": taskdoc["parent_structure"]["spacegroup"]["number"]}
        else:
            sgnum = taskdoc["output"]["spacegroup"]["number"]
            t_struct = Structure.from_dict(taskdoc["output"]["structure"])
            q = {"formula_reduced_abc": formula, "sg_number": sgnum}

        for m in self._materials.find(q, {"parent_structure": 1, "structure": 1, "material_id": 1}):
            s_dict = m["parent_structure"]["structure"] if "parent_structure" in m else m["structure"]
            m_struct = Structure.from_dict(s_dict)
            sm = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5,
                                  primitive_cell=True, scale=True,
                                  attempt_supercell=False, allow_subset=False,
                                  comparator=ElementComparator())

            if sm.fit(m_struct, t_struct):
                return m["material_id"]

        return None
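A minimal standalone sketch of the matching step used above (not part of the builder), assuming only pymatgen's public StructureMatcher API; the two rock-salt-like cells are made-up illustrations:

# Hedged sketch: compare two illustrative structures the way _match_material does.
from pymatgen.core import Lattice, Structure
from pymatgen.analysis.structure_matcher import StructureMatcher, ElementComparator

# two illustrative cells of the same structure type, one slightly scaled
s1 = Structure(Lattice.cubic(5.64), ["Na", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
s2 = Structure(Lattice.cubic(5.70), ["Na", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])

sm = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5,
                      primitive_cell=True, scale=True,
                      attempt_supercell=False, allow_subset=False,
                      comparator=ElementComparator())
print(sm.fit(s1, s2))  # True: the task would be merged into the existing material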
Example #2
    def run_task(self, fw_spec):

        from pymatgen.analysis.eos import EOS

        tag = self["tag"]
        db_file = env_chk(self.get("db_file"), fw_spec)
        summary_dict = {"eos": self["eos"]}

        mmdb = MMVaspDb.from_db_file(db_file, admin=True)
        # get the optimized structure
        d = mmdb.collection.find_one({"task_label": "{} structure optimization".format(tag)})
        structure = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
        summary_dict["structure"] = structure.as_dict()

        # get the data (energies and volumes) from the deformation runs
        docs = mmdb.collection.find({"task_label": {"$regex": "{} bulk_modulus*".format(tag)},
                                     "formula_pretty": structure.composition.reduced_formula})
        energies = []
        volumes = []
        for d in docs:
            s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
            energies.append(d["calcs_reversed"][-1]["output"]['energy'])
            volumes.append(s.volume)
        summary_dict["energies"] = energies
        summary_dict["volumes"] = volumes

        # fit the equation of state
        eos = EOS(self["eos"])
        eos_fit = eos.fit(volumes, energies)
        summary_dict["results"] = dict(eos_fit.results)

        with open("bulk_modulus.json", "w") as f:
            f.write(json.dumps(summary_dict, default=DATETIME_HANDLER))

        logger.info("BULK MODULUS CALCULATION COMPLETE")
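The equation-of-state fit itself can be exercised without any database plumbing; a hedged sketch with made-up volume/energy points, assuming the pymatgen.analysis.eos.EOS interface used above:

# Hedged sketch: fit an illustrative E(V) curve with pymatgen's EOS.
from pymatgen.analysis.eos import EOS

volumes = [34.0, 36.0, 38.0, 40.0, 42.0, 44.0, 46.0]          # Angstrom^3, illustrative
energies = [-9.64, -9.84, -9.96, -10.0, -9.96, -9.84, -9.64]  # eV, illustrative

eos_fit = EOS(eos_name="vinet").fit(volumes, energies)
print(eos_fit.b0_GPa)         # bulk modulus from the fit
print(dict(eos_fit.results))  # e0, b0, b1, v0, as stored in summary_dict["results"]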
Example #3
    def _match_material(self, doc):
        """
        Returns the material_id that has the same structure as this doc as
         determined by the structure matcher. Returns None if no match.

        Args:
            doc: a JSON-like document

        Returns:
            (int) matching material_id or None
        """
        formula = doc["formula_reduced_abc"]
        sgnum = doc["spacegroup"]["number"]

        for m in self._materials.find({"formula_reduced_abc": formula, "sg_number": sgnum},
                                      {"structure": 1, "material_id": 1}):

            m_struct = Structure.from_dict(m["structure"])
            t_struct = Structure.from_dict(doc["structure"])

            sm = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5,
                                  primitive_cell=True, scale=True,
                                  attempt_supercell=False, allow_subset=False,
                                  comparator=ElementComparator())

            if sm.fit(m_struct, t_struct):
                return m["material_id"]

        return None
Example #4
    def setUp(self):

        with open(os.path.join(test_dir, 'LobsterCompleteDos_spin.json'), 'r') as f:
            data_spin = json.load(f)
        self.LobsterCompleteDOS_spin = LobsterCompleteDos.from_dict(data_spin)

        with open(os.path.join(test_dir, 'LobsterCompleteDos_nonspin.json'), 'r') as f:
            data_nonspin = json.load(f)
        self.LobsterCompleteDOS_nonspin = LobsterCompleteDos.from_dict(data_nonspin)

        with open(os.path.join(test_dir, 'structure_KF.json'), 'r') as f:
            data_structure = json.load(f)
        self.structure = Structure.from_dict(data_structure)

        with open(os.path.join(test_dir, 'LobsterCompleteDos_MnO.json'), 'r') as f:
            data_MnO = json.load(f)
        self.LobsterCompleteDOS_MnO = LobsterCompleteDos.from_dict(data_MnO)

        with open(os.path.join(test_dir, 'LobsterCompleteDos_MnO_nonspin.json'), 'r') as f:
            data_MnO_nonspin = json.load(f)
        self.LobsterCompleteDOS_MnO_nonspin = LobsterCompleteDos.from_dict(data_MnO_nonspin)

        with open(os.path.join(test_dir, 'structure_MnO.json'), 'r') as f:
            data_MnO = json.load(f)
        self.structure_MnO = Structure.from_dict(data_MnO)
Example #5
    def run_task(self, fw_spec):

        tag = self["tag"]
        db_file = env_chk(self.get("db_file"), fw_spec)
        t_step = self.get("t_step", 10)
        t_min = self.get("t_min", 0)
        t_max = self.get("t_max", 1000)
        mesh = self.get("mesh", [20, 20, 20])
        eos = self.get("eos", "vinet")
        qha_type = self.get("qha_type", "debye_model")
        pressure = self.get("pressure", 0.0)
        gibbs_summary_dict = {}

        mmdb = MMVaspDb.from_db_file(db_file, admin=True)
        # get the optimized structure
        d = mmdb.collection.find_one({"task_label": "{} structure optimization".format(tag)})
        structure = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
        gibbs_summary_dict["structure"] = structure.as_dict()

        # get the data (energy, volume, force constants) from the deformation runs
        docs = mmdb.collection.find({"task_label": {"$regex": "{} gibbs*".format(tag)},
                                     "formula_pretty": structure.composition.reduced_formula})
        energies = []
        volumes = []
        force_constants = []
        for d in docs:
            s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
            energies.append(d["calcs_reversed"][-1]["output"]['energy'])
            if qha_type not in ["debye_model"]:
                force_constants.append(d["calcs_reversed"][-1]["output"]['force_constants'])
            volumes.append(s.volume)
        gibbs_summary_dict["energies"] = energies
        gibbs_summary_dict["volumes"] = volumes
        if qha_type not in ["debye_model"]:
            gibbs_summary_dict["force_constants"] = force_constants

        G, T = None, None
        # use debye model
        if qha_type in ["debye_model"]:

            from atomate.tools.analysis import get_debye_model_gibbs

            G, T = get_debye_model_gibbs(energies, volumes, structure, t_min, t_step, t_max, eos,
                                         pressure)

        # use the phonopy interface
        else:

            from atomate.tools.analysis import get_phonopy_gibbs

            G, T = get_phonopy_gibbs(energies, volumes, force_constants, structure, t_min, t_step,
                                     t_max, mesh, eos, pressure)

        gibbs_summary_dict["G"] = G
        gibbs_summary_dict["T"] = T

        with open("gibbs.json", "w") as f:
            f.write(json.dumps(gibbs_summary_dict, default=DATETIME_HANDLER))
        logger.info("GIBBS FREE ENERGY CALCULATION COMPLETE")
Example #6
    def run_task(self, fw_spec):

        from pymatgen.analysis.eos import EOS

        eos = self.get("eos", "vinet")
        tag = self["tag"]
        db_file = env_chk(self.get("db_file"), fw_spec)
        summary_dict = {"eos": eos}
        to_db = self.get("to_db", True)

        # collect and store task_id of all related tasks to make unique links with "tasks" collection
        all_task_ids = []

        mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
        # get the optimized structure
        d = mmdb.collection.find_one({"task_label": "{} structure optimization".format(tag)})
        all_task_ids.append(d["task_id"])
        structure = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
        summary_dict["structure"] = structure.as_dict()
        summary_dict["formula_pretty"] = structure.composition.reduced_formula

        # get the data (energies and volumes) from the deformation runs
        docs = mmdb.collection.find({"task_label": {"$regex": "{} bulk_modulus*".format(tag)},
                                     "formula_pretty": structure.composition.reduced_formula})
        energies = []
        volumes = []
        for d in docs:
            s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
            energies.append(d["calcs_reversed"][-1]["output"]['energy'])
            volumes.append(s.volume)
            all_task_ids.append(d["task_id"])
        summary_dict["energies"] = energies
        summary_dict["volumes"] = volumes
        summary_dict["all_task_ids"] = all_task_ids

        # fit the equation of state
        eos = EOS(eos)
        eos_fit = eos.fit(volumes, energies)
        summary_dict["bulk_modulus"] = eos_fit.b0_GPa

        # TODO: find a better way for passing tags of the entire workflow to db - albalu
        if fw_spec.get("tags", None):
            summary_dict["tags"] = fw_spec["tags"]
        summary_dict["results"] = dict(eos_fit.results)
        summary_dict["created_at"] = datetime.utcnow()

        # db_file itself is required but the user can choose to pass the results to db or not
        if to_db:
            mmdb.collection = mmdb.db["eos"]
            mmdb.collection.insert_one(summary_dict)
        else:
            with open("bulk_modulus.json", "w") as f:
                f.write(json.dumps(summary_dict, default=DATETIME_HANDLER))

        # TODO: @matk86 - there needs to be a builder to put it into materials collection... -computron
        logger.info("Bulk modulus calculation complete.")
Example #7
    def run_task(self, fw_spec):

        from atomate.vasp.analysis.phonopy import get_phonopy_thermal_expansion

        tag = self["tag"]
        db_file = env_chk(self.get("db_file"), fw_spec)
        t_step = self.get("t_step", 10)
        t_min = self.get("t_min", 0)
        t_max = self.get("t_max", 1000)
        mesh = self.get("mesh", [20, 20, 20])
        eos = self.get("eos", "vinet")
        pressure = self.get("pressure", 0.0)
        summary_dict = {}

        mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
        # get the optimized structure
        d = mmdb.collection.find_one({"task_label": "{} structure optimization".format(tag)})
        structure = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
        summary_dict["structure"] = structure.as_dict()
        summary_dict["formula_pretty"] = structure.composition.reduced_formula

        # get the data (energy, volume, force constants) from the deformation runs
        docs = mmdb.collection.find({"task_label": {"$regex": "{} thermal_expansion*".format(tag)},
                                     "formula_pretty": structure.composition.reduced_formula})
        energies = []
        volumes = []
        force_constants = []
        for d in docs:
            s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
            energies.append(d["calcs_reversed"][-1]["output"]['energy'])
            volumes.append(s.volume)
            force_constants.append(d["calcs_reversed"][-1]["output"]['force_constants'])
        summary_dict["energies"] = energies
        summary_dict["volumes"] = volumes
        summary_dict["force_constants"] = force_constants

        alpha, T = get_phonopy_thermal_expansion(energies, volumes, force_constants, structure,
                                                 t_min, t_step, t_max, mesh, eos, pressure)

        summary_dict["alpha"] = alpha
        summary_dict["T"] = T

        with open("thermal_expansion.json", "w") as f:
            f.write(json.dumps(summary_dict, default=DATETIME_HANDLER))

        # TODO: @matk86 - there needs to be a way to insert this into a database! And also
        # a builder to put it into materials collection... -computron
        logger.info("Thermal expansion coefficient calculation complete.")
Example #8
    def run_task(self, fw_spec):

        prev_dir = fw_spec.get('PREV_DIR', None)
        self.custom_params = self.get('custom_params', None)   

        if isinstance(self["structure"], Structure):
            s = self["structure"]
        elif isinstance(self["structure"], dict):
            s = Structure.from_dict(self["structure"])
        else:
            s = Structure.from_file(os.path.join(prev_dir, self["structure"]))


        vis = load_class("pymatgen.io.vasp.sets", self["vasp_input_set"])(
                         **self.get("input_set_params", {}))
        vis.write_input(s, ".")


        # Write Custom KPOINTS settings if necessary
        ksettings = self.custom_params.get('user_kpts_settings', None) if isinstance(
                self.custom_params, dict) else None
        if ksettings:
            style = ksettings.get('kpts_style', 'Gamma')
            kpoints = ksettings.get('kpts', [16,16,16])
            shift = ksettings.get('kpts_shift', [0,0,0])
            k = Kpoints(kpts=[kpoints], kpts_shift=shift)
            k.style = style
            k.write_file("KPOINTS")
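For the custom k-points branch above, a hedged sketch of an equivalent write using pymatgen's Kpoints.gamma_automatic convenience constructor rather than the raw constructor (values illustrative):

# Hedged sketch: write a Gamma-centered 16x16x16 KPOINTS file.
from pymatgen.io.vasp.inputs import Kpoints

k = Kpoints.gamma_automatic(kpts=(16, 16, 16), shift=(0, 0, 0))
k.write_file("KPOINTS")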
Example #9
    def _create_new_material(self, taskdoc):
        """
        Create a new material document.

        Args:
            taskdoc (dict): a JSON-like task document

        Returns:
            (int) - material_id of the new document
        """
        doc = {"created_at": datetime.utcnow()}
        doc["_tasksbuilder"] = {"all_task_ids": [], "prop_metadata":
            {"labels": {}, "task_ids": {}}, "updated_at": datetime.utcnow()}
        doc["spacegroup"] = taskdoc["output"]["spacegroup"]
        doc["sg_symbol"] = doc["spacegroup"]["symbol"]
        doc["sg_number"] = doc["spacegroup"]["number"]
        doc["structure"] = taskdoc["output"]["structure"]
        doc["material_id"] = self.mid_str(self._counter.find_one_and_update(
                        {"_id": "materialid"}, {"$inc": {"c": 1}}, return_document=ReturnDocument.AFTER)["c"])
        for x in ["formula_anonymous", "formula_pretty", "formula_reduced_abc",
                  "elements", "nelements", "chemsys"]:
            doc[x] = taskdoc[x]

        if "parent_structure" in taskdoc:
            doc["parent_structure"] = taskdoc["parent_structure"]
            t_struct = Structure.from_dict(taskdoc["parent_structure"]["structure"])
            doc["parent_structure"]["formula_reduced_abc"] = t_struct.composition.reduced_formula
        self._materials.insert_one(doc)

        return doc["material_id"]
Example #10
def create_df():
    newcoll = db['abc3']
    newcoll.drop()
    x = 0
    for doc in db['pauling_file_min_tags'].find().batch_size(75):
        x += 1
        if x % 1000 == 0:
            print(x)
        if doc['metadata']['_structure']['anonymized_formula'] == 'ABC3' and doc['is_ht'] in [True,False] \
                and 'TiO3' in doc['metadata']['_structure']['reduced_cell_formula']:
            newcoll.insert(doc)
    cursor = newcoll.find()
    df = pd.DataFrame(list(cursor))
    for i, row in df.iterrows():
        df.set_value(i, 'reduced_cell_formula', row['metadata']['_structure']['reduced_cell_formula'])
        try:
            df.set_value(i, 'space_group', int(row['metadata']['_Springer']['geninfo']['Space Group']))
        except Exception:
            df.set_value(i, 'space_group', None)
        try:
            df.set_value(i, 'density', float(row['metadata']['_Springer']['geninfo']['Density'].split()[2]))
        except IndexError as e:
            df.set_value(i, 'density', None)
        structure = Structure.from_dict(row['structure'])
        num_density = (structure.num_sites/structure.volume)
        no_of_atoms = Composition(structure.composition).num_atoms
        num_vol = (structure.volume/no_of_atoms)
        df.set_value(i, 'number_density', num_density)
        df.set_value(i, 'number_volume', num_vol)
        if row['metadata']['_structure']['is_ordered']:
            df.set_value(i, 'is_ordered', 1)
        else:
            df.set_value(i, 'is_ordered', 0)
    df.to_pickle('abc3.pkl')
Example #11
def set_coordination(df):
    for i, row in df.iterrows():
        if i % 10 == 0:
            print(i)
        struc = Structure.from_dict(row['structure'])
        # using own edited Voronoi algorithm
        try:
            specie_meancoord = get_avg_cns(VoronoiCoordFinder_edited(struc).get_cns())
        except Exception as e:
            print(e)
            continue
        df.set_value(i, 'VoronoiEd_cn', json.dumps(specie_meancoord))
        df.set_value(i, 'Voronoi_cation_avgcn', get_cation_weighted_avg(specie_meancoord, struc))
        # using own Effective coordination algorithm
        try:
            specie_meaneffcoord = get_avg_cns(EffectiveCoordFinder(struc).get_cns(radius=10.0))
        except Exception as e:
            print(e)
            continue
        df.set_value(i, 'eff_cn', json.dumps(specie_meaneffcoord))
        df.set_value(i, 'eff_cation_avgcn', get_cation_weighted_avg(specie_meaneffcoord, struc))
        # using Tina's O'Keeffe coordination algorithm
        if row['metadata']['_structure']['is_ordered']:
            try:
                okeeffe_coord = okeeffe_get_avg_cn(struc)
            except Exception as e:
                print(e)
                continue
            df.set_value(i, 'okeeffe_cn', json.dumps(okeeffe_coord))
            df.set_value(i, 'okeeffe_cn_avg', get_cation_weighted_avg(okeeffe_coord, struc))
    return df
Example #12
 def run_task(self, fw_spec):
     s = Structure.from_dict(self["structure"])
     vis = load_class("pymatgen.io.vaspio_set", self["vasp_input_set"])(
         **self.get("input_set_params", {}))
     vis.write_input(s, ".")
Example #13
    def run(self):
        print("MaterialsEhullBuilder starting...")
        self._build_indexes()

        q = {"thermo.energy": {"$exists": True}}
        if not self.update_all:
            q["stability"] = {"$exists": False}

        mats = [m for m in self._materials.find(q, {"calc_settings": 1, "structure": 1,
                                                    "thermo.energy": 1, "material_id": 1})]
        pbar = tqdm(mats)
        for m in pbar:
            pbar.set_description("Processing materials_id: {}".format(m['material_id']))
            try:
                params = {}
                for x in ["is_hubbard", "hubbards", "potcar_spec"]:
                    params[x] = m["calc_settings"][x]

                structure = Structure.from_dict(m["structure"])
                energy = m["thermo"]["energy"]
                my_entry = ComputedEntry(structure.composition, energy, parameters=params)
                self._materials.update_one({"material_id": m["material_id"]},
                                           {"$set": {"stability": self.mpr.get_stability([my_entry])[0]}})

                mpids = self.mpr.find_structure(structure)
                self._materials.update_one({"material_id": m["material_id"]}, {"$set": {"mpids": mpids}})

            except Exception:
                import traceback
                print("<---")
                print("There was an error processing material_id: {}".format(m))
                traceback.print_exc()
                print("--->")

        print("MaterialsEhullBuilder finished processing.")
Example #14
 def get(self, cid, name):
     """Retrieve structure for contribution in CIF format.
     ---
     operationId: get_cif
     parameters:
         - name: cid
           in: path
           type: string
           pattern: '^[a-f0-9]{24}$'
           required: true
           description: contribution ID (ObjectId)
         - name: name
           in: path
           type: string
           required: true
           description: name of structure
     responses:
         200:
             description: structure in CIF format
             schema:
                 type: string
     """
     mask = [f'content.structures.{name}']
     entry = Contributions.objects.only(*mask).get(id=cid)
      s_dict = entry.content.structures.get(name)
      if s_dict:
          structure = Structure.from_dict(s_dict)
          return str(CifWriter(structure, symprec=1e-10))
      return f"Structure with name {name} not found for {cid}!"  # TODO raise 404?
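A standalone sketch of the final serialization step, assuming only pymatgen's CifWriter; the structure here is illustrative rather than one fetched from the Contributions collection:

# Hedged sketch: turn a Structure into the CIF string that the endpoint returns.
from pymatgen.core import Lattice, Structure
from pymatgen.io.cif import CifWriter

structure = Structure(Lattice.cubic(4.2), ["Cs", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
cif_string = str(CifWriter(structure, symprec=1e-10))
print(cif_string)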
Example #15
    def run_task(self, fw_spec):
        additional_fields = self.get("additional_fields", {})

        # pass the additional_fields first to avoid overriding BoltztrapAnalyzer items
        d = additional_fields.copy()

        btrap_dir = os.path.join(os.getcwd(), "boltztrap")
        d["boltztrap_dir"] = btrap_dir

        bta = BoltztrapAnalyzer.from_files(btrap_dir)
        d.update(bta.as_dict())
        d["scissor"] = bta.intrans["scissor"]

        # trim the output
        for x in ['cond', 'seebeck', 'kappa', 'hall', 'mu_steps', 'mu_doping', 'carrier_conc']:
            del d[x]

        if not self.get("hall_doping"):
            del d["hall_doping"]

        bandstructure_dir = os.getcwd()
        d["bandstructure_dir"] = bandstructure_dir

        # add the structure
        v, o = get_vasprun_outcar(bandstructure_dir, parse_eigen=False, parse_dos=False)
        structure = v.final_structure
        d["structure"] = structure.as_dict()
        d["formula_pretty"] = structure.composition.reduced_formula
        d.update(get_meta_from_structure(structure))

        # add the spacegroup
        sg = SpacegroupAnalyzer(Structure.from_dict(d["structure"]), 0.1)
        d["spacegroup"] = {"symbol": sg.get_space_group_symbol(),
                           "number": sg.get_space_group_number(),
                           "point_group": sg.get_point_group_symbol(),
                           "source": "spglib",
                           "crystal_system": sg.get_crystal_system(),
                           "hall": sg.get_hall()}

        d["created_at"] = datetime.utcnow()

        db_file = env_chk(self.get('db_file'), fw_spec)

        if not db_file:
            del d["dos"]
            with open(os.path.join(btrap_dir, "boltztrap.json"), "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            mmdb = VaspCalcDb.from_db_file(db_file, admin=True)

            # dos gets inserted into GridFS
            dos = json.dumps(d["dos"], cls=MontyEncoder)
            fsid, compression = mmdb.insert_gridfs(dos, collection="dos_boltztrap_fs",
                                                   compress=True)
            d["dos_boltztrap_fs_id"] = fsid
            del d["dos"]

            mmdb.db.boltztrap.insert_one(d)
Example #16
    def run_task(self, fw_spec):

        from atomate.tools.analysis import get_phonopy_thermal_expansion

        tag = self["tag"]
        db_file = env_chk(self.get("db_file"), fw_spec)
        t_step = self.get("t_step", 10)
        t_min = self.get("t_min", 0)
        t_max = self.get("t_max", 1000)
        mesh = self.get("mesh", [20, 20, 20])
        eos = self.get("eos", "vinet")
        pressure = self.get("pressure", 0.0)
        summary_dict = {}

        mmdb = MMVaspDb.from_db_file(db_file, admin=True)
        # get the optimized structure
        d = mmdb.collection.find_one({"task_label": "{} structure optimization".format(tag)})
        structure = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
        summary_dict["structure"] = structure.as_dict()

        # get the data (energy, volume, force constants) from the deformation runs
        docs = mmdb.collection.find({"task_label": {"$regex": "{} thermal_expansion*".format(tag)},
                                     "formula_pretty": structure.composition.reduced_formula})
        energies = []
        volumes = []
        force_constants = []
        for d in docs:
            s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
            energies.append(d["calcs_reversed"][-1]["output"]['energy'])
            volumes.append(s.volume)
            force_constants.append(d["calcs_reversed"][-1]["output"]['force_constants'])
        summary_dict["energies"] = energies
        summary_dict["volumes"] = volumes
        summary_dict["force_constants"] = force_constants

        alpha, T = get_phonopy_thermal_expansion(energies, volumes, force_constants, structure,
                                                 t_min, t_step, t_max, mesh, eos, pressure)

        summary_dict["alpha"] = alpha
        summary_dict["T"] = T

        with open("thermal_expansion.json", "w") as f:
            f.write(json.dumps(summary_dict, default=DATETIME_HANDLER))

        logger.info("THERMAL EXPANSION COEFF CALCULATION COMPLETE")
Example #17
 def test_convert_to_ieee(self):
     for xtal in self.ieee_data.keys():
         orig = TensorBase(self.ieee_data[xtal]['original_tensor'])
         ieee = TensorBase(self.ieee_data[xtal]['ieee_tensor'])
         struct = Structure.from_dict(self.ieee_data[xtal]['structure'])
         diff = np.max(abs(ieee - orig.convert_to_ieee(struct)))
         err_msg = "{} IEEE conversion failed with max diff {}".format(
             xtal, diff) 
         self.assertArrayAlmostEqual(ieee, orig.convert_to_ieee(struct),
                                     err_msg = err_msg, decimal=3)
Example #18
    def test_list_based_functions(self):
        # zeroed
        tc = TensorCollection([1e-4*Tensor(np.eye(3))]*4)
        for t in tc.zeroed():
            self.assertArrayEqual(t, np.zeros((3, 3)))
        for t in tc.zeroed(1e-5):
            self.assertArrayEqual(t, 1e-4*np.eye(3))
        self.list_based_function_check("zeroed", tc)
        self.list_based_function_check("zeroed", tc, tol=1e-5)

        # transform
        symm_op = SymmOp.from_axis_angle_and_translation([0, 0, 1], 30,
                                                         False, [0, 0, 1])
        self.list_based_function_check("transform", self.seq_tc, symm_op=symm_op)
        
        # symmetrized
        self.list_based_function_check("symmetrized", self.seq_tc)

        # rotation
        a = 3.14 * 42.5 / 180
        rotation = SquareTensor([[math.cos(a), 0, math.sin(a)], [0, 1, 0],
                                 [-math.sin(a), 0, math.cos(a)]])
        self.list_based_function_check("rotate", self.diff_rank, matrix=rotation)

        # is_symmetric
        self.assertFalse(self.seq_tc.is_symmetric())
        self.assertTrue(self.diff_rank.is_symmetric())

        # fit_to_structure
        self.list_based_function_check("fit_to_structure", self.diff_rank, self.struct)
        self.list_based_function_check("fit_to_structure", self.seq_tc, self.struct)
        
        # voigt
        self.list_based_function_check("voigt", self.diff_rank)

        # is_voigt_symmetric
        self.assertTrue(self.diff_rank.is_voigt_symmetric())
        self.assertFalse(self.seq_tc.is_voigt_symmetric())

        # Convert to ieee
        for entry in self.ieee_data[:2]:
            xtal = entry['xtal']
            tc = TensorCollection([entry['original_tensor']]*3)
            struct = Structure.from_dict(entry['structure'])
            self.list_based_function_check("convert_to_ieee", tc, struct)

        # from_voigt
        tc_input = [t for t in np.random.random((3, 6, 6))]
        tc = TensorCollection.from_voigt(tc_input)
        for t_input, t in zip(tc_input, tc):
            self.assertArrayAlmostEqual(Tensor.from_voigt(t_input), t)
Example #19
def get_data(db_file, query):
    coll = get_collection(db_file)
    docs = coll.find(query)
    energies = []
    volumes = []
    force_constants = []
    for d in docs:
        s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
        energies.append(d["calcs_reversed"][-1]["output"]['energy'])
        volumes.append(s.volume)
        force_constants.append(d["calcs_reversed"][-1]["output"]['force_constants'])
    return energies, volumes, force_constants
Example #20
    def _match_material(self, taskdoc, ltol=0.2, stol=0.3, angle_tol=5):
        """
        Returns the material_id that has the same structure as this task as
         determined by the structure matcher. Returns None if no match.

        Args:
            taskdoc (dict): a JSON-like task document
            ltol (float): StructureMatcher tuning parameter 
            stol (float): StructureMatcher tuning parameter 
            angle_tol (float): StructureMatcher tuning parameter

        Returns:
            (int) matching material_id or None
        """
        formula = taskdoc["formula_reduced_abc"]

        # handle the "parent structure" option, which is used to intentionally force slightly
        # different structures to contribute to the same "material", e.g. from an ordering scheme
        if "parent_structure" in taskdoc:
            t_struct = Structure.from_dict(taskdoc["parent_structure"]["structure"])
            q = {"formula_reduced_abc": formula, "parent_structure.spacegroup.number": taskdoc[
                "parent_structure"]["spacegroup"]["number"]}
        else:
            sgnum = taskdoc["output"]["spacegroup"]["number"]
            t_struct = Structure.from_dict(taskdoc["output"]["structure"])
            q = {"formula_reduced_abc": formula, "sg_number": sgnum}

        for m in self._materials.find(q, {"parent_structure": 1, "structure": 1, "material_id": 1}):
            s_dict = m["parent_structure"]["structure"] if "parent_structure" in m else m[
                "structure"]
            m_struct = Structure.from_dict(s_dict)
            sm = StructureMatcher(ltol=ltol, stol=stol, angle_tol=angle_tol,
                                  primitive_cell=True, scale=True,
                                  attempt_supercell=False, allow_subset=False,
                                  comparator=ElementComparator())

            if sm.fit(m_struct, t_struct):
                return m["material_id"]

        return None
Example #21
    def run_task(self, fw_spec):

        # Get optimized structure
        # TODO: will this find the correct path if the workflow is rerun from the start?
        optimize_loc = fw_spec["calc_locs"][0]["path"]
        logger.info("PARSING INITIAL OPTIMIZATION DIRECTORY: {}".format(optimize_loc))
        drone = VaspDrone()
        optimize_doc = drone.assimilate(optimize_loc)
        opt_struct = Structure.from_dict(optimize_doc["calcs_reversed"][0]["output"]["structure"])

        d = {"analysis": {}, "deformation_tasks": fw_spec["deformation_tasks"],
             "initial_structure": self['structure'].as_dict(),
             "optimized_structure": opt_struct.as_dict()}
        if fw_spec.get("tags",None):
            d["tags"] = fw_spec["tags"]
        dtypes = fw_spec["deformation_tasks"].keys()
        defos = [fw_spec["deformation_tasks"][dtype]["deformation_matrix"]
                 for dtype in dtypes]
        stresses = [fw_spec["deformation_tasks"][dtype]["stress"] for dtype in dtypes]
        stress_dict = {IndependentStrain(defo) : Stress(stress) for defo, stress
                       in zip(defos, stresses)}

        logger.info("ANALYZING STRESS/STRAIN DATA")
        # DETERMINE IF WE HAVE 6 "UNIQUE" deformations
        if len(set([de[:3] for de in dtypes])) == 6:
            # Perform Elastic tensor fitting and analysis
            result = ElasticTensor.from_stress_dict(stress_dict)
            d["elastic_tensor"] = result.voigt.tolist()
            kg_average = result.kg_average
            d.update({"K_Voigt": kg_average[0], "G_Voigt": kg_average[1],
                      "K_Reuss": kg_average[2], "G_Reuss": kg_average[3],
                      "K_Voigt_Reuss_Hill": kg_average[4],
                      "G_Voigt_Reuss_Hill": kg_average[5]})
            d["universal_anisotropy"] = result.universal_anisotropy
            d["homogeneous_poisson"] = result.homogeneous_poisson

        else:
            raise ValueError("Fewer than 6 unique deformations")

        d["state"] = "successful"

        # Save analysis results in json or db
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            db = MMVaspDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(d)
            logger.info("ELASTIC ANALYSIS COMPLETE")
        return FWAction()
Example #22
 def test_convert_to_ieee(self):
     for entry in self.ieee_data:
         xtal = entry['xtal']
         orig = Tensor(entry['original_tensor'])
         ieee = Tensor(entry['ieee_tensor'])
         struct = Structure.from_dict(entry['structure'])
         diff = np.max(abs(ieee - orig.convert_to_ieee(struct)))
         err_msg = "{} IEEE conversion failed with max diff {}. Numpy version: {}".format(
             xtal, diff, np.__version__)
         print(ieee)
         print(orig.convert_to_ieee(struct))
         self.assertArrayAlmostEqual(ieee, orig.convert_to_ieee(struct),
                                     err_msg=err_msg, decimal=3)
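A self-contained sketch of what the assertion checks, assuming pymatgen's Tensor class from pymatgen.core.tensors; an isotropic tensor and a simple cubic structure are chosen so the IEEE-converted tensor should equal the original:

# Hedged sketch: IEEE conversion of an isotropic rank-2 tensor is effectively a no-op.
import numpy as np
from pymatgen.core import Lattice, Structure
from pymatgen.core.tensors import Tensor

struct = Structure(Lattice.cubic(4.2), ["Cs", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
orig = Tensor(2.5 * np.eye(3))
ieee = orig.convert_to_ieee(struct)
np.testing.assert_array_almost_equal(ieee, orig, decimal=3)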
Example #23
 def test_convert_to_ieee(self):
     for entry in self.ieee_data:
         xtal = entry['xtal']
         orig = TensorBase(entry['original_tensor'])
         ieee = TensorBase(entry['ieee_tensor'])
         struct = Structure.from_dict(entry['structure'])
         diff = np.max(abs(ieee - orig.convert_to_ieee(struct)))
         err_msg = "{} IEEE conversion failed with max diff {}".format(
             xtal, diff)
         self.assertArrayAlmostEqual(ieee,
                                     orig.convert_to_ieee(struct),
                                     err_msg=err_msg,
                                     decimal=3)
Example #24
def min_anion_activated(doc):
    """min_anion_activated
    Gets the minimum distance to the anion in the activated state.
    :param doc: MongoDB document.
    :return: Minimum activation state anion distance along the path.
    """
    structures = [Structure.from_dict(s) for s in doc["neb_images"]]
    neb_energies = [float(e) for e in doc["NEB_analysis"]["path_energy"]]
    max_energy = max(neb_energies)
    activated_state_index = neb_energies.index(max_energy)
    w_i = determine_working_ion(structures[0])
    d_i = get_diffuser_index(structures, w_i)
    return get_anion_dist_in_struct(structures[activated_state_index], d_i)
Example #25
def run_check(s, fmt="cif"):
    structure = Structure.from_dict(s, fmt=fmt)
    mofcheckerinstance, result = _run_check(structure)
    rows = []
    for k, v in result.items():
        if k not in ["name", "path", "density"]:
            id = "table_{}".format(k)
            if v == mofcheckerinstance.check_expected_values[k]:
                color = OK_COLOR
            else:
                color = BAD_COLOR
            tooltip = dbc.Tooltip(
                mofcheckerinstance.check_descriptions[k],
                target=id,
            )

            rows.append(
                html.Tr(
                    [
                        html.Td(
                            [
                                k.replace("_", " ").replace(
                                    "oms", "open metal site"),
                                tooltip,
                            ],
                            id=id,
                        ),
                        html.Td(str(v)),
                    ],
                    style={"background-color": color},
                ))

    table = (dbc.Table(
        [
            html.Thead([
                html.Td("Check name", style={"font-weight": "bold"}),
                html.Td("Check result", style={"font-weight": "bold"}),
            ]),
            *rows,
        ],
        bordered=True,
        hover=True,
        responsive=True,
        striped=True,
        style={
            "width": "93%",
            "margin-left": "5%"
        },
    ), )

    return table
Example #26
    def __init__(self, spec, label, user_incar_settings=None,
                 user_kpoints_settings=None,
                 additional_cust_args=None, **kwargs):
        """
        Args:
            spec (dict): Specification of the job to run.
            label (str): "parent", "ep0" or "ep1"
            user_incar_settings (dict): Additional INCAR settings.
            user_kpoints_settings (dict): Additional KPOINTS settings.
            additional_cust_args (dict): Other kwargs that are passed to RunVaspCustodian.
            \*\*kwargs: Other kwargs that are passed to Firework.__init__.
        """

        # Get structure from spec
        assert label in ["parent", "ep0", "ep1"]
        structure_dict = spec[label]
        structure = Structure.from_dict(structure_dict)

        user_incar_settings = user_incar_settings or {}
        user_kpoints_settings = user_kpoints_settings or {}
        additional_cust_args = additional_cust_args or {}

        # Task 1: Write input sets
        if label == 'parent':
            vasp_input_set = MITRelaxSet(structure,
                                         user_incar_settings=user_incar_settings,
                                         user_kpoints_settings=user_kpoints_settings)
        else:  # label == "ep0" or "ep1"
            from pymatgen_diffusion.neb.io import MVLCINEBEndPointSet

            vasp_input_set = MVLCINEBEndPointSet(structure,
                                                 user_incar_settings=user_incar_settings,
                                                 user_kpoints_settings=user_kpoints_settings)

        write_ep_task = WriteVaspFromIOSet(structure=structure, output_dir=".",
                                           vasp_input_set=vasp_input_set)

        # Task 2: Run VASP using Custodian
        cust_args = {"job_type": "normal", "gzip_output": False,
                     "handler_group": "no_handler"}
        cust_args.update(additional_cust_args)
        run_vasp = RunVaspCustodian(vasp_cmd=">>vasp_cmd<<",
                                    gamma_vasp_cmd=">>gamma_vasp_cmd<<",
                                    **cust_args)

        # Task 3, 4: Transfer and PassCalLocs
        tasks = [write_ep_task, run_vasp, TransferNEBTask(label=label),
                 PassCalcLocs(name=label)]

        super(NEBRelaxationFW, self).__init__(tasks, spec=spec, name=label,
                                              **kwargs)
Example #27
    def __init__(self, spec, label, user_incar_settings=None,
                 user_kpoints_settings=None,
                 additional_cust_args=None, **kwargs):
        """
        Args:
            spec (dict): Specification of the job to run.
            label (str): "parent", "ep0" or "ep1"
            user_incar_settings (dict): Additional INCAR settings.
            user_kpoints_settings (dict): Additional KPOINTS settings.
            additional_cust_args (dict): Other kwargs that are passed to RunVaspCustodian.
            \*\*kwargs: Other kwargs that are passed to Firework.__init__.
        """

        # Get structure from spec
        assert label in ["parent", "ep0", "ep1"]
        structure_dict = spec[label]
        structure = Structure.from_dict(structure_dict)

        user_incar_settings = user_incar_settings or {}
        user_kpoints_settings = user_kpoints_settings or {}
        additional_cust_args = additional_cust_args or {}

        # Task 1: Write input sets
        if label == 'parent':
            vasp_input_set = MITRelaxSet(structure,
                                         user_incar_settings=user_incar_settings,
                                         user_kpoints_settings=user_kpoints_settings)
        else:  # label == "ep0" or "ep1"
            from pymatgen_diffusion.neb.io import MVLCINEBEndPointSet

            vasp_input_set = MVLCINEBEndPointSet(structure,
                                                 user_incar_settings=user_incar_settings,
                                                 user_kpoints_settings=user_kpoints_settings)

        write_ep_task = WriteVaspFromIOSet(structure=structure,
                                           vasp_input_set=vasp_input_set)

        # Task 2: Run VASP using Custodian
        cust_args = {"job_type": "normal", "gzip_output": False,
                     "handler_group": "no_handler"}
        cust_args.update(additional_cust_args)
        run_vasp = RunVaspCustodian(vasp_cmd=">>vasp_cmd<<",
                                    gamma_vasp_cmd=">>gamma_vasp_cmd<<",
                                    **cust_args)

        # Task 3, 4: Transfer and PassCalLocs
        tasks = [write_ep_task, run_vasp, TransferNEBTask(label=label),
                 PassCalcLocs(name=label)]

        super(NEBRelaxationFW, self).__init__(tasks, spec=spec, name=label,
                                              **kwargs)
Example #28
def pymatgen_comparisons(structures,
                         comparator='OccupancyComparator',
                         anonymous=False,
                         **kwargs):
    '''
    Distance based on pymatgen StructureMatcher rms distance
    Args:
        structures ([dict]): dictionary-encoded pymatgen Structure objects
        comparator (str): name of comparator object to use from ['StructureMatcher',
                                                 'AbstractComparator', 'ElementComparator',
                                                 'FrameworkComparator', 'OccupancyComparator',
                                                 'OrderDisorderElementComparator',
                                                 'SpeciesComparator', 'SpinComparator']
        anonymous (bool): whether or not to ignore species
        **kwargs: **kwargs to be passed to pymatgen's StructureMatcher object
    Returns:
        [[bool]]: n x n upper-triangular matrix of pairwise comparisons, where entry
            [i][j] (i < j) compares structures i and j (1-2, 1-3, ..., 1-n, 2-3, ...)
    '''
    comparators = {
        'AbstractComparator': AbstractComparator,
        'ElementComparator': ElementComparator,
        'FrameworkComparator': FrameworkComparator,
        'OccupancyComparator': OccupancyComparator,
        'OrderDisorderElementComparator': OrderDisorderElementComparator,
        'SpeciesComparator': SpeciesComparator,
        'SpinComparator': SpinComparator,
        None: None
    }
    comparator = comparators[comparator]()
    structure_matcher = StructureMatcher(comparator=comparator, **kwargs)
    structures = [Structure.from_dict(structure) for structure in structures]
    stars = []
    for i in range(len(structures)):
        for j in range(i + 1, len(structures)):
            stars.append((structures[i], structures[j]))
    pool = mp.Pool()
    if anonymous:
        results = pool.starmap(structure_matcher.fit_anonymous, stars)
    else:
        results = pool.starmap(structure_matcher.fit, stars)
    pool.close()
    pool.join()
    results = [bool(result) for result in results]

    comparisons = np.zeros((len(structures), len(structures)), dtype=bool)
    counter = 0
    for i in range(len(structures)):
        for j in range(i + 1, len(structures)):
            comparisons[i, j] = results[counter]
            counter += 1

    return comparisons.tolist()
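To make the anonymous flag concrete, a small serial sketch (no multiprocessing), assuming pymatgen's StructureMatcher.fit and fit_anonymous; the NaCl/KCl cells are illustrative:

# Hedged sketch: fit() requires matching species, fit_anonymous() only a matching framework.
from pymatgen.core import Lattice, Structure
from pymatgen.analysis.structure_matcher import StructureMatcher

nacl = Structure(Lattice.cubic(5.64), ["Na", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
kcl = Structure(Lattice.cubic(6.29), ["K", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])

sm = StructureMatcher(primitive_cell=True, scale=True)
print(sm.fit(nacl, kcl))            # False: species differ
print(sm.fit_anonymous(nacl, kcl))  # True: same structure type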
Example #29
    def match(self, snls, mat):
        """
        Yields the SNLs from the given list that match the given materials doc.

        Args:
            snls ([dict]): the list of SNL dicts to check against the materials doc
            mat (dict): a materials doc

        Returns:
            generator of SNL dicts that match the materials doc
        """
        sm = StructureMatcher(
            ltol=self.ltol,
            stol=self.stol,
            angle_tol=self.angle_tol,
            primitive_cell=True,
            scale=True,
            attempt_supercell=False,
            allow_subset=False,
            comparator=ElementComparator())

        m_strucs = [Structure.from_dict(mat["structure"])
                    ] + [Structure.from_dict(init_struc) for init_struc in mat["initial_structures"]]
        for snl in snls:
            snl_struc = StructureNL.from_dict(snl).structure
            try:
                snl_spacegroup = snl_struc.get_space_group_info()[0]
            except Exception:
                snl_spacegroup = -1
            for struc in m_strucs:
                try:
                    struc_sg = struc.get_space_group_info()[0]
                except Exception:
                    struc_sg = -1
                # The try-excepts are a temp fix to a spglib bug
                if struc_sg == snl_spacegroup and sm.fit(struc, snl_struc):
                    yield snl
                    break
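As a concrete picture of what match consumes, a hedged sketch that round-trips an SNL dict and reads the spacegroup the same way, assuming pymatgen's StructureNL from pymatgen.util.provenance; the structure and author are illustrative:

# Hedged sketch: build an SNL dict, recover its structure, and read its spacegroup.
from pymatgen.core import Lattice, Structure
from pymatgen.util.provenance import StructureNL

struct = Structure(Lattice.cubic(5.64), ["Na", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
snl_dict = StructureNL(struct, authors="A. Author <author@example.com>").as_dict()

snl_struc = StructureNL.from_dict(snl_dict).structure
print(snl_struc.get_space_group_info()[0])  # spacegroup symbol, as compared in match()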
Example #30
    def run(self):
        logger.info("MaterialsEhullBuilder starting...")
        self._build_indexes()

        q = {"thermo.energy": {"$exists": True}}
        if not self.update_all:
            q["stability"] = {"$exists": False}

        mats = [m for m in self._materials.find(q, {"calc_settings": 1, "structure": 1,
                                                    "thermo.energy": 1, "material_id": 1})]
        pbar = tqdm(mats)
        for m in pbar:
            pbar.set_description("Processing materials_id: {}".format(m['material_id']))
            try:
                params = {}
                for x in ["is_hubbard", "hubbards", "potcar_spec"]:
                    params[x] = m["calc_settings"][x]

                structure = Structure.from_dict(m["structure"])
                energy = m["thermo"]["energy"]
                my_entry = ComputedEntry(structure.composition, energy, parameters=params)

                # TODO: @computron This only calculates Ehull with respect to Materials Project.
                # It should also account for the current database's results. -computron
                self._materials.update_one({"material_id": m["material_id"]},
                                           {"$set": {"stability": self.mpr.get_stability([my_entry])[0]}})

                # TODO: @computron: also add additional properties like inverse hull energy?

                # TODO: @computron it's better to use PD tool or reaction energy calculator
                # Otherwise the compatibility schemes might have issues...one strategy might be
                # use MP only to retrieve entries but compute the PD locally -computron
                for el, elx in my_entry.composition.items():
                    entries = self.mpr.get_entries(el.symbol, compatible_only=True)
                    min_e = min(entries, key=lambda x: x.energy_per_atom).energy_per_atom
                    energy -= elx * min_e
                self._materials.update_one({"material_id": m["material_id"]},
                                           {"$set": {"thermo.formation_energy_per_atom": energy / structure.num_sites}})

                mpids = self.mpr.find_structure(structure)
                self._materials.update_one({"material_id": m["material_id"]}, {"$set": {"mpids": mpids}})

            except Exception:
                import traceback
                logger.exception("<---")
                logger.exception("There was an error processing material_id: {}".format(m))
                logger.exception(traceback.format_exc())
                logger.exception("--->")

        logger.info("MaterialsEhullBuilder finished processing.")
Example #31
 def test_write_read_cfgs(self):
     self.potential.write_cfgs("input.data", cfg_pool=self.test_pool)
     datapool, df = self.potential.read_cfgs("input.data")
     self.assertEqual(len(self.test_pool), len(datapool))
     for data1, data2 in zip(self.test_pool, datapool):
         struct1 = data1["structure"]
         struct2 = Structure.from_dict(data2["structure"])
         self.assertTrue(struct1 == struct2)
         energy1 = data1["outputs"]["energy"]
         energy2 = data2["outputs"]["energy"]
         self.assertTrue(abs(energy1 - energy2) < 1e-3)
         forces1 = np.array(data1["outputs"]["forces"])
         forces2 = data2["outputs"]["forces"]
         np.testing.assert_array_almost_equal(forces1, forces2)
Example #32
    def setUp(self):
        # first for spin polarized version
        doscar = os.path.join(test_dir_doscar, "DOSCAR.lobster.spin")
        vasprun = os.path.join(test_dir_doscar, "vasprun.xml.lobster.spin")
        doscar2 = os.path.join(test_dir_doscar, "DOSCAR.lobster.nonspin")
        vasprun2 = os.path.join(test_dir_doscar, "vasprun.xml.lobster.nonspin")

        self.DOSCAR_spin_pol = Doscar(doscar=doscar, vasprun=vasprun)
        self.DOSCAR_nonspin_pol = Doscar(doscar=doscar2, vasprun=vasprun2)

        with open(os.path.join(test_dir_doscar, 'structure_KF.json'), 'r') as f:
            data = json.load(f)

        self.structure = Structure.from_dict(data)
Example #33
 def test_write_read_cfgs(self):
     self.potential.write_cfg('test.cfgs', cfg_pool=self.test_pool)
     datapool, df = self.potential.read_cfgs('test.cfgs', symbol='Mo')
     self.assertEqual(len(self.test_pool), len(datapool))
     for data1, data2 in zip(self.test_pool, datapool):
         struct1 = data1['structure']
         struct2 = Structure.from_dict(data2['structure'])
         self.assertTrue(struct1 == struct2)
         energy1 = data1['outputs']['energy']
         energy2 = data2['outputs']['energy']
         self.assertAlmostEqual(energy1, energy2)
         forces1 = np.array(data1['outputs']['forces'])
         forces2 = data2['outputs']['forces']
         np.testing.assert_array_almost_equal(forces1, forces2)
Example #34
    def setUp(self):
        # first for spin polarized version
        doscar = os.path.join(test_dir_doscar, "DOSCAR.lobster.spin")
        vasprun = os.path.join(test_dir_doscar, "vasprun.xml.lobster.spin")
        doscar2 = os.path.join(test_dir_doscar, "DOSCAR.lobster.nonspin")
        vasprun2 = os.path.join(test_dir_doscar, "vasprun.xml.lobster.nonspin")

        self.DOSCAR_spin_pol = Doscar(doscar=doscar, vasprun=vasprun)
        self.DOSCAR_nonspin_pol = Doscar(doscar=doscar2, vasprun=vasprun2)

        with open(os.path.join(test_dir_doscar, 'structure_KF.json'), 'r') as f:
            data = json.load(f)

        self.structure = Structure.from_dict(data)
Example #35
    def get_entries(self, chemsys):
        """
        Get all entries in a chemsys from materials

        Args:
            chemsys (str): a chemical system represented as a string of elements separated by dashes (-)

        Returns:
            set(ComputedEntry): a set of entries for this system
        """

        self.logger.info("Getting entries for: {}".format(chemsys))

        new_q = dict(self.query)
        new_q["chemsys"] = {"$in": list(chemsys_permutations(chemsys))}
        fields = [
            "structure", self.materials.key, "thermo.energy",
            "unit_cell_formula", "calc_settings.is_hubbard",
            "calc_settings.hubbards", "calc_settings.potcar_spec",
            "calc_settings.run_type"
        ]
        data = list(self.materials.query(fields, new_q))

        all_entries = []

        for d in data:
            parameters = {
                "is_hubbard": d["calc_settings"]["is_hubbard"],
                "hubbards": d["calc_settings"]["hubbards"],
                "potcar_spec": d["calc_settings"]["potcar_spec"],
                "run_type": d["calc_settings"]["run_type"]
            }

            entry = ComputedEntry(Composition(d["unit_cell_formula"]),
                                  d["thermo"]["energy"],
                                  0.0,
                                  parameters=parameters,
                                  entry_id=d[self.materials.key],
                                  data={
                                      "oxide_type":
                                      oxide_type(
                                          Structure.from_dict(d["structure"]))
                                  })

            all_entries.append(entry)

        self.logger.info("Total entries in {} : {}".format(
            chemsys, len(all_entries)))

        return all_entries
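A minimal sketch of the entry construction on its own, assuming pymatgen's ComputedEntry; the composition, energy, and parameters are illustrative placeholders rather than values read from the materials store:

# Hedged sketch: build a ComputedEntry the way get_entries does, with placeholder data.
from pymatgen.core import Composition
from pymatgen.entries.computed_entries import ComputedEntry

entry = ComputedEntry(
    Composition("Fe2O3"),               # stands in for d["unit_cell_formula"]
    -67.5,                              # stands in for d["thermo"]["energy"], in eV
    parameters={"is_hubbard": True, "hubbards": {"Fe": 5.3},
                "potcar_spec": [], "run_type": "GGA+U"},
    entry_id="hypothetical-entry-001",  # stands in for d[self.materials.key]
)
print(entry.energy_per_atom)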
Example #36
    def run(self):
        logger.info("MaterialsEhullBuilder starting...")
        self._build_indexes()

        q = {"thermo.energy": {"$exists": True}}
        if not self.update_all:
            q["stability"] = {"$exists": False}

        mats = [m for m in self._materials.find(q, {"calc_settings": 1, "structure": 1,
                                                    "thermo.energy": 1, "material_id": 1})]
        pbar = tqdm(mats)
        for m in pbar:
            pbar.set_description("Processing materials_id: {}".format(m['material_id']))
            try:
                params = {}
                for x in ["is_hubbard", "hubbards", "potcar_spec"]:
                    params[x] = m["calc_settings"][x]

                structure = Structure.from_dict(m["structure"])
                energy = m["thermo"]["energy"]
                my_entry = ComputedEntry(structure.composition, energy, parameters=params)

                # TODO: @computron This only calculates Ehull with respect to Materials Project.
                # It should also account for the current database's results. -computron
                self._materials.update_one({"material_id": m["material_id"]},
                                           {"$set": {"stability": self.mpr.get_stability([my_entry])[0]}})

                # TODO: @computron: also add additional properties like inverse hull energy?

                # TODO: @computron it's better to use PD tool or reaction energy calculator
                # Otherwise the compatibility schemes might have issues...one strategy might be
                # use MP only to retrieve entries but compute the PD locally -computron
                for el, elx in my_entry.composition.items():
                    entries = self.mpr.get_entries(el.symbol, compatible_only=True)
                    min_e = min(entries, key=lambda x: x.energy_per_atom).energy_per_atom
                    energy -= elx * min_e
                self._materials.update_one({"material_id": m["material_id"]},
                                           {"$set": {"thermo.formation_energy_per_atom": energy / structure.num_sites}})

                mpids = self.mpr.find_structure(structure)
                self._materials.update_one({"material_id": m["material_id"]}, {"$set": {"mpids": mpids}})

            except Exception:
                import traceback
                logger.exception("<---")
                logger.exception("There was an error processing material_id: {}".format(m))
                logger.exception(traceback.format_exc())
                logger.exception("--->")

        logger.info("MaterialsEhullBuilder finished processing.")
Example #37
def get_empty_structures(criterion, s_values, coll):
    """get_empty_structures
    Gets all host structures associated with a list of search values.
    :param criterion: The search criterion to use to get structures.
    :param s_values: List of search values (e.g. path IDs) to get data for.
    :param coll: A PyMongo collection.
    :return: A dictionary of {s_value: empty_structure} 
    """
    s_dict = {}
    for s in s_values:
        results = query_db(criterion, s, coll)
        structure = Structure.from_dict(results[0]["host_structure"])
        s_dict[s] = structure
    return s_dict
Example #38
def process_traj(data):
    i, fs_id, fs, db_file = data[0], data[1], data[2], data[3]
    mmdb = VaspMDCalcDb.from_db_file(db_file, admin=True)
    ionic_steps_dict = load_ionic_steps(fs_id, mmdb.db, fs)

    structure = Structure.from_dict(ionic_steps_dict[0]['structure'])
    positions = [0] * len(ionic_steps_dict)
    # use a separate loop variable so the task index `i` from `data` is preserved
    for step_idx, step in enumerate(ionic_steps_dict):
        positions[step_idx] = [atom['abc'] for atom in step["structure"]["sites"]]

    traj = Trajectory(structure.lattice.matrix, structure.species, positions,
                      0.002)
    return i, traj.as_dict()
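process_traj is shaped for parallel mapping over (index, fs_id, fs, db_file) tuples; a hedged sketch of such a driver, where the db file path, GridFS label, and fs_ids are placeholders:

from multiprocessing import Pool

if __name__ == "__main__":
    db_file = "/path/to/db.json"                      # placeholder path
    fs_ids = ["5f0c...", "5f0d..."]                   # placeholder GridFS ids
    jobs = [(i, fs_id, "previous_runs_gfs", db_file)  # "previous_runs_gfs" is a placeholder label
            for i, fs_id in enumerate(fs_ids)]
    with Pool(processes=2) as pool:
        trajectories = dict(pool.map(process_traj, jobs))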
Ejemplo n.º 39
0
def group_by_material_id(materials_dict,
                         docs,
                         tol=1e-6,
                         structure_matcher=None):
    """
    Groups a collection of documents by material id
    as found in a materials collection

    Args:
        materials_dict (dict): dictionary of structures keyed by material_id
        docs ([dict]): list of documents
        tol: tolerance for lattice grouping
        structure_matcher (StructureMatcher): structure
            matcher for finding equivalent structures

    Returns:
        documents grouped by material_id from the materials
        collection
    """
    sm = structure_matcher or StructureMatcher()
    tasks_by_opt = group_deformations_by_optimization_task(docs, tol)
    task_sets_by_mp_id = {}
    for opt_task, defo_tasks in tasks_by_opt:
        structure = Structure.from_dict(opt_task['output']['structure'])
        match = False
        for c_id, candidate in materials_dict.items():
            c_structure = Structure.from_dict(candidate)
            if sm.fit(c_structure, structure):
                mp_id = c_id
                match = True
                break
        if match:
            if mp_id in task_sets_by_mp_id:
                task_sets_by_mp_id[mp_id].append((opt_task, defo_tasks))
            else:
                task_sets_by_mp_id[mp_id] = [(opt_task, defo_tasks)]
    return task_sets_by_mp_id
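A hedged usage sketch: materials_dict maps material_id to a structure dict as stored in a materials collection, and docs holds the optimization/deformation task documents. The database, collection names, and query below are assumptions:

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["vasp_db"]          # hypothetical database
materials_dict = {m["material_id"]: m["structure"]
                  for m in db["materials"].find({}, {"material_id": 1, "structure": 1})}
docs = list(db["tasks"].find({"task_label": {"$regex": "deformation|structure optimization"}}))
task_sets = group_by_material_id(materials_dict, docs)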
Ejemplo n.º 40
0
    def setUp(self):

        self.s1 = Structure.from_spacegroup(225,
                                            Lattice.cubic(5.69169),
                                            ["Na", "Cl"],
                                            [[0, 0, 0], [0, 0, 0.5]])
        self.s2 = Structure.from_dict({'@class': 'Structure',
                                       '@module': 'pymatgen.core.structure',
                                       'charge': None,
                                       'lattice': {'a': 5.488739045730133,
                                                   'alpha': 60.0000000484055,
                                                   'b': 5.488739048031658,
                                                   'beta': 60.00000003453459,
                                                   'c': 5.48873905,
                                                   'gamma': 60.000000071689925,
                                                   'matrix': [[4.75338745, 0.0, 2.74436952],
                                                              [1.58446248, 4.48153667, 2.74436952],
                                                              [0.0, 0.0, 5.48873905]],
                                                   'volume': 116.92375473740876},
                                       'sites': [{'abc': [0.5, 0.5, 0.5],
                                                  'label': 'Al',
                                                  'properties': {'coordination_no': 10, 'forces': [0.0, 0.0, 0.0]},
                                                  'species': [{'element': 'Al', 'occu': 1}],
                                                  'xyz': [3.168924965, 2.240768335, 5.488739045]},
                                                 {'abc': [0.5, 0.5, 0.0],
                                                  'label': 'Al',
                                                  'properties': {'coordination_no': 10, 'forces': [0.0, 0.0, 0.0]},
                                                  'species': [{'element': 'Al', 'occu': 1}],
                                                  'xyz': [3.168924965, 2.240768335, 2.74436952]},
                                                 {'abc': [0.0, 0.5, 0.5],
                                                  'label': 'Al',
                                                  'properties': {'coordination_no': 10, 'forces': [0.0, 0.0, 0.0]},
                                                  'species': [{'element': 'Al', 'occu': 1}],
                                                  'xyz': [0.79223124, 2.240768335, 4.116554285]},
                                                 {'abc': [0.5, 0.0, 0.5],
                                                  'label': 'Al',
                                                  'properties': {'coordination_no': 10, 'forces': [0.0, 0.0, 0.0]},
                                                  'species': [{'element': 'Al', 'occu': 1}],
                                                  'xyz': [2.376693725, 0.0, 4.116554285]},
                                                 {'abc': [0.875, 0.875, 0.875],
                                                  'label': 'Lu',
                                                  'properties': {'coordination_no': 16, 'forces': [0.0, 0.0, 0.0]},
                                                  'species': [{'element': 'Lu', 'occu': 1}],
                                                  'xyz': [5.54561868875, 3.9213445862499996, 9.60529332875]},
                                                 {'abc': [0.125, 0.125, 0.125],
                                                  'label': 'Lu',
                                                  'properties': {'coordination_no': 16, 'forces': [0.0, 0.0, 0.0]},
                                                  'species': [{'element': 'Lu', 'occu': 1}],
                                                  'xyz': [0.79223124125, 0.56019208375, 1.37218476125]}]})
Ejemplo n.º 41
0
    def featurize(self,
                  structures: Iterable[JSON],
                  log_every_n: int = 1000) -> np.ndarray:
        """Calculate features for crystal structures.

    Parameters
    ----------
    structures: Iterable[JSON]
      Iterable sequence of pymatgen structure dictionaries.
      Json-serializable dictionary representation of pymatgen.core.structure
      https://pymatgen.org/pymatgen.core.structure.html
    log_every_n: int, default 1000
      Logging messages reported every `log_every_n` samples.

    Returns
    -------
    features: np.ndarray
      A numpy array containing a featurized representation of
      `structures`.

    """

        # Special case handling of single crystal structure
        if not isinstance(structures, Iterable):
            structures = [structures]
        else:
            # Convert iterables to list
            structures = list(structures)

        try:
            from pymatgen.core import Structure
        except ModuleNotFoundError:
            raise ValueError("This class requires pymatgen to be installed.")

        features = []
        for idx, structure in enumerate(structures):
            if idx % log_every_n == 0:
                logger.info("Featurizing datapoint %i" % idx)
            try:
                s = Structure.from_dict(structure)
                features.append(self._featurize(s))
            except Exception:
                logger.warning(
                    "Failed to featurize datapoint %i. Appending empty array" %
                    idx)
                features.append(np.array([]))

        features = np.asarray(features)
        return features
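A hedged usage sketch for featurize; the featurizer instance is hypothetical (any subclass implementing _featurize(structure) behaves the same), and the NaCl cell is just a stand-in input:

from pymatgen.core import Structure, Lattice

nacl = Structure.from_spacegroup(225, Lattice.cubic(5.69169), ["Na", "Cl"],
                                 [[0, 0, 0], [0, 0, 0.5]])
features = featurizer.featurize([nacl.as_dict()])   # `featurizer` is a hypothetical instance
print(features.shape)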
Ejemplo n.º 42
0
def data_missing(xas_docs_for_mpid):
    """
    Do some sites have no spectra recorded?

    Checks symmetrically equivalent sites.
    """
    structure = Structure.from_dict(xas_docs_for_mpid[0]['structure'])
    ss = SymmSites(structure)
    absorbing_atoms = set([d['absorbing_atom'] for d in xas_docs_for_mpid])
    some_sites_absent = any(
        len(set(ss.get_equivalent_site_indices(i)) & absorbing_atoms) == 0
        for i in range(structure.num_sites))
    some_spectra_empty = any(
        len(d['spectrum']) == 0 for d in xas_docs_for_mpid)
    return some_sites_absent or some_spectra_empty
Ejemplo n.º 43
0
def path_percentage(doc):
    """path_percentage
    Normalizes the NEB image coordinates to a distance along the path.
    :param doc: MongoDB document.
    :return: A list of percentage values for each image.
    """
    structures = [Structure.from_dict(s) for s in doc["neb_images"]]
    w_i = determine_working_ion(structures[0])
    d_i = get_diffuser_index(structures, w_i)
    path_total = doc["NEB_analysis"]["path_length"]
    percentage_list = [0]
    path_distance = 0  # cumulative distance travelled by the diffusing ion
    for i in range(len(structures) - 1):
        path_distance += structures[i][d_i].distance(structures[i + 1][d_i])
        percentage_list.append(path_distance / path_total * 100)
    return percentage_list
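A self-contained sketch of the normalization the loop performs: the cumulative image-to-image distance of the diffusing ion is rescaled to a percentage of the total path length. The distances below are made up, and the total is taken as the sum of the segments so the last image lands exactly at 100%:

dists = [0.4, 0.5, 0.6]          # hypothetical image-to-image distances (angstrom)
path_total = sum(dists)
path_distance, percentage_list = 0.0, [0]
for d in dists:
    path_distance += d
    percentage_list.append(path_distance / path_total * 100)
print(percentage_list)           # [0, ~26.7, 60.0, 100.0]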
Ejemplo n.º 44
0
    def extract_run_data_info(self):
        """ read the json file and extract the structure """
        json_data_filename = 'run_data.json'
        with open(json_data_filename, 'r') as f:
            data_dictionary = json.load(f)

        last_step = data_dictionary['relaxation'][-1]
        structure_dict = last_step['structure']

        # we finally get the relaxed structure
        structure = Structure.from_dict(structure_dict)
        max_force = np.sqrt(np.sum(np.array(last_step['forces'])**2, axis=1)).max()

        return structure, max_force
Ejemplo n.º 45
0
    def match(self, snl, mats):
        """
        Finds a material doc that matches with the given snl

        Args:
            snl (dict): the snl doc
            mats ([dict]): the materials docs to match against

        Returns:
            dict: a materials doc if one is found otherwise returns None
        """
        sm = StructureMatcher(ltol=self.ltol, stol=self.stol, angle_tol=self.angle_tol,
                              primitive_cell=True, scale=True,
                              attempt_supercell=False, allow_subset=False,
                              comparator=ElementComparator())
        snl_struc = StructureNL.from_dict(snl).structure

        for m in mats:
            m_struct = Structure.from_dict(m["structure"])
            init_m_struct = Structure.from_dict(m["initial_structure"])
            if sm.fit(m_struct, snl_struc) or sm.fit(init_m_struct, snl_struc):
                return m[self.materials.key]

        return None
Ejemplo n.º 46
0
def run_prediction(_, store):
    """Returns the prediction table"""
    app.logger.info('triggering prediction update')
    try:
        if store['structure'] is not None:
            with drc.temp() as tempfilehandle:
                s = Structure.from_dict(store['structure'])  # pylint: disable=invalid-name
                s.to(filename=tempfilehandle.name, fmt='cif')
                prediction = predict(tempfilehandle.name)
            return prediction

        raise PreventUpdate
    except Exception as e:  # pylint:disable=broad-except,invalid-name
        print(e)
        raise PreventUpdate
Ejemplo n.º 47
0
    def _create_new_material(self, taskdoc):
        """
        Create a new material document.

        Args:
            taskdoc (dict): a JSON-like task document

        Returns:
            (int) - material_id of the new document
        """
        doc = {"created_at": datetime.utcnow()}
        doc["_tasksbuilder"] = {
            "all_task_ids": [],
            "prop_metadata": {
                "labels": {},
                "task_ids": {}
            },
            "updated_at": datetime.utcnow()
        }
        doc["spacegroup"] = taskdoc["output"]["spacegroup"]
        doc["structure"] = taskdoc["output"]["structure"]
        doc["material_id"] = dbid_to_str(
            self._m_prefix,
            self._counter.find_one_and_update(
                {"_id": "materialid"}, {"$inc": {
                    "c": 1
                }},
                return_document=ReturnDocument.AFTER)["c"])

        doc["sg_symbol"] = doc["spacegroup"]["symbol"]
        doc["sg_number"] = doc["spacegroup"]["number"]

        for x in [
                "formula_anonymous", "formula_pretty", "formula_reduced_abc",
                "elements", "nelements", "chemsys"
        ]:
            doc[x] = taskdoc[x]

        if "parent_structure" in taskdoc:
            doc["parent_structure"] = taskdoc["parent_structure"]
            t_struct = Structure.from_dict(
                taskdoc["parent_structure"]["structure"])
            doc["parent_structure"][
                "formula_reduced_abc"] = t_struct.composition.reduced_formula

        self._materials.insert_one(doc)

        return doc["material_id"]
Ejemplo n.º 48
0
def site_weighted_spectrum(xas_docs, num_samples=200):
    """
    Equivalent-site-weighted spectrum for a specie in a structure.

    Args:
        xas_docs (list): MongoDB docs for all XAS XANES K-edge spectra
            for a specie for a structure.
        num_samples (int): Number of samples for interpolation.
            Original data has 100 data points.

    Returns:
        tuple: a plottable (x, y) pair for the spectrum
    """
    maxes, mins = [], []
    fs = []
    multiplicities = []

    for doc in xas_docs:
        energies = [e[0] for e in doc["spectrum"]]
        # Checking the multiplicities of sites
        s = Structure.from_dict(doc['structure'])
        sa = SpacegroupAnalyzer(s)
        ss = sa.get_symmetrized_structure()
        multiplicity = len(ss.find_equivalent_sites(s[doc['absorbing_atom']]))
        multiplicities.append(multiplicity)

        # Getting axis limits for each spectrum for the sites corresponding to
        # K-edge is a bit tricky, because the x-axis data points don't align
        # among different spectra for the same structure. So, prepare for
        # interpolation within the intersection of x-axis ranges.
        maxes.append(doc['spectrum'][-1][0])
        mins.append(doc['spectrum'][0][0])
        d0 = np.array(doc['spectrum'])
        # use 3rd-order spline interpolation for chi (idx 5) vs energy (idx 0).
        f = interp1d(d0[:, 0],
                     d0[:, 5],
                     kind='cubic',
                     bounds_error=False,
                     fill_value=0)
        fs.append(f)

    x_axis = np.linspace(max(mins), min(maxes), num=num_samples)
    weighted_spectrum = np.zeros(num_samples)
    sum_multiplicities = sum(multiplicities)
    for i in range(len(multiplicities)):
        weighted_spectrum += (multiplicities[i] *
                              fs[i](x_axis)) / sum_multiplicities
    return (x_axis, weighted_spectrum)
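A hedged usage sketch: plot the multiplicity-weighted spectrum for one material/element pair; xas_docs is assumed to already hold the relevant XANES documents:

import matplotlib.pyplot as plt

x, y = site_weighted_spectrum(xas_docs, num_samples=200)   # xas_docs assumed pre-queried
plt.plot(x, y)
plt.xlabel("Energy (eV)")
plt.ylabel("Site-weighted absorption (a.u.)")
plt.show()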
Ejemplo n.º 49
0
def old_style_mat(new_style_mat):
    """
    Creates the base document for the old MP mapidoc style from the new document structure
    """

    mat = {}
    mp_conversion_dict = _settings["conversion_dict"]
    mag_types = _settings["mag_types"]

    # Uses the conversion dict to copy over values which handles the bulk of the work.
    for mp, new_key in mp_conversion_dict.items():
        if has(new_style_mat, new_key):
            set_(mat, mp, get(new_style_mat, new_key))

    # Anything coming through DFT is always ordered
    mat["is_ordered"] = True
    mat["is_compatible"] = True

    struc = Structure.from_dict(mat["structure"])
    mat["oxide_type"] = oxide_type(struc)
    mat["reduced_cell_formula"] = struc.composition.reduced_composition.as_dict()
    mat["unit_cell_formula"] = struc.composition.as_dict()
    mat["full_formula"] = "".join(struc.formula.split())
    vals = sorted(mat["reduced_cell_formula"].values())
    mat["anonymous_formula"] = {
        string.ascii_uppercase[i]: float(vals[i]) for i in range(len(vals))
    }
    mat["initial_structure"] = new_style_mat.get("initial_structure", None)

    set_(mat, "pseudo_potential.functional", "PBE")

    set_(
        mat,
        "pseudo_potential.labels",
        [
            p["titel"].split()[1]
            for p in get(new_style_mat, "calc_settings.potcar_spec")
        ],
    )
    set_(mat, "pseudo_potential.pot_type", "paw")

    mat["blessed_tasks"] = {
        d["task_type"]: d["task_id"] for d in new_style_mat["origins"]
    }
    mat["deprecated_tasks"] = new_style_mat.get("deprecated_tasks", [])
    mat["ntask_ids"] = len(mat["task_ids"])

    return mat
Ejemplo n.º 50
0
def min_anion_along_path(doc):
    """min_anion_along_path
    Gets the distance to the nearest anion along the whole path.
    :param doc: MongoDB document.
    :return: A list of distances to closest anion along NEB trajectory.
    """
    structures = [Structure.from_dict(s) for s in doc["neb_images"]]
    neb_energies = [float(e) for e in doc["NEB_analysis"]["path_energy"]]
    max_energy = max(neb_energies)
    activated_state_index = neb_energies.index(max_energy)
    w_i = determine_working_ion(structures[0])
    d_i = get_diffuser_index(structures, w_i)
    anion_dists = [get_anion_dist_in_struct(s, d_i) for s in structures]
    return anion_dists
Ejemplo n.º 51
0
def get_struc_list(cifpath, json_name):
    """Import pymatgen Structure objects from a json.
    Args:
        cifpath (string): Filepath to json file
        json_name (string): Name of json file
    """
    with open('{0}{1}'.format(cifpath, json_name)) as f:
        saved_strucs = json.load(f)

    struc_list = []
    for i, entry in enumerate(tqdm(saved_strucs)):
        struc_list.append({
            'structure': Structure.from_dict(entry['structure']),
            'id': entry['id']
        })
    return (struc_list)
Ejemplo n.º 52
0
    def from_dict(cls, dictionary):
        """
        Reconstitute a TwoD_Structure object from a dictionary representation of TwoD_Structure or pymatgen.Structure
        input argument:
            dictionary (dict): Dictionary representation of structure
        """
        structure = Structure.from_dict(dictionary)
        input_argument_dict = {
            "lattice": structure.lattice,
            "species": structure.species,
            "coords": structure.frac_coords,
            "coords_are_cartesian": False,
            "charge": structure.charge
        }

        return TwoD_Structure(**input_argument_dict)
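A hedged usage sketch for the classmethod above, round-tripping through a plain pymatgen Structure dict; the lattice and species are arbitrary:

from pymatgen.core import Structure, Lattice

bulk = Structure(Lattice.cubic(3.0), ["Si", "Si"], [[0, 0, 0], [0.25, 0.25, 0.25]])
twod = TwoD_Structure.from_dict(bulk.as_dict())   # accepts TwoD_Structure or Structure dicts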
Ejemplo n.º 53
0
    def extract_run_data_info(self):
        """ read the json file and extract the structure """
        json_data_filename = 'run_data.json'
        with open(json_data_filename, 'r') as f:
            data_dictionary = json.load(f)

        last_step = data_dictionary['relaxation'][-1]
        structure_dict = last_step['structure']

        # we finally get the relaxed structure
        structure = Structure.from_dict(structure_dict)
        max_force = np.sqrt(np.sum(np.array(last_step['forces'])**2,
                                   axis=1)).max()

        return structure, max_force
Ejemplo n.º 54
0
    def pre_save_post_validation(cls, sender, document, **kwargs):
        from mpcontribs.api.structures.views import StructuresResource

        resource = StructuresResource()
        d = resource.serialize(document, fields=["lattice", "sites", "charge"])
        s = json.dumps(d, sort_keys=True).encode("utf-8")
        document.md5 = md5(s).hexdigest()
        structure = Structure.from_dict(d)

        try:
            writer = CifWriter(structure, symprec=1e-10)
        except TypeError:
            # save CIF string without symmetry information
            writer = CifWriter(structure)

        document.cif = str(writer)
Ejemplo n.º 55
0
def doc_to_snl(doc):
    geninfo = doc['metadata']['_Springer']['geninfo']
    return StructureNL(
        Structure.from_dict(doc['structure']),
        [{"name": "Saurabh Bajaj", "email": "*****@*****.**"},
         {"name": "Anubhav Jain", "email": "*****@*****.**"}],
        [], '', ['Pauling file'],
        {'_pauling_file': {
            'key': doc['key'],
            'reference': geninfo['ref'],
            'general_info': {
                'Sample Detail(s)': geninfo['Sample Detail(s)'],
                'Standard Formula': geninfo['Standard Formula'],
                'Mineral Name(s)': geninfo['Mineral Name(s)'],
                'Phase Prototype': geninfo['Phase Prototype'],
                'Structure Class(es)': geninfo['Structure Class(es)'],
                'Measurement Detail(s)': geninfo['Measurement Detail(s)'],
                'Phase Label(s)': geninfo['Phase Label(s)']},
            'expdetails': doc['metadata']['_Springer']['expdetails'],
            'title': doc['metadata']['_Springer']['title']}},
        [{'name': 'Pauling file', 'url': doc['webpage_link'],
          'description': {'key': doc['key']}}])
Ejemplo n.º 56
0
    def test_write_read_cfgs(self):
        self.potential.write_cfgs("test.xyz", cfg_pool=self.test_pool)
        datapool, df = self.potential.read_cfgs("test.xyz")
        self.assertEqual(len(self.test_pool), len(datapool))
        for data1, data2 in zip(self.test_pool, datapool):
            struct1 = data1["structure"]
            struct2 = Structure.from_dict(data2["structure"])
            self.assertTrue(struct1 == struct2)
            energy1 = data1["outputs"]["energy"]
            energy2 = data2["outputs"]["energy"]
            self.assertAlmostEqual(energy1, energy2)
            forces1 = np.array(data1["outputs"]["forces"])
            forces2 = data2["outputs"]["forces"]
            np.testing.assert_array_almost_equal(forces1, forces2)
            stress1 = np.array(data1["outputs"]["virial_stress"])
            stress2 = data2["outputs"]["virial_stress"]
            np.testing.assert_array_almost_equal(stress1, stress2)
Ejemplo n.º 57
0
    def process_item(self, item):
        """
        Process the tasks and materials into a magnetism collection

        Args:
            item dict: a dict of material_id, structure, and tasks

        Returns:
            dict: a magnetism dictionary
        """

        struct = Structure.from_dict(item["structure"])
        total_magnetization = item["magnetism"].get(
            "total_magnetization", 0)  # not necessarily == sum(magmoms)
        msa = CollinearMagneticStructureAnalyzer(struct)

        sign = np.sign(total_magnetization)
        total_magnetization = abs(total_magnetization)
        magmoms = list(sign * np.array(msa.magmoms))

        magnetism = {
            self.magnetism.key: item[self.materials.key],
            "magnetism": {
                'ordering':
                msa.ordering.value,
                'is_magnetic':
                msa.is_magnetic,
                'exchange_symmetry':
                msa.get_exchange_group_info()[1],
                'num_magnetic_sites':
                msa.number_of_magnetic_sites,
                'num_unique_magnetic_sites':
                msa.number_of_unique_magnetic_sites(),
                'types_of_magnetic_species':
                [str(t) for t in msa.types_of_magnetic_specie],
                'magmoms':
                magmoms,
                'total_magnetization_normalized_vol':
                total_magnetization / struct.volume,
                'total_magnetization_normalized_formula_units':
                total_magnetization /
                (struct.composition.get_reduced_composition_and_factor()[1])
            },
            "pymatgen_version": pymatgen_version
        }
        return magnetism
Ejemplo n.º 58
0
def task_dict_to_wf(task_dict, launchpad):
    fw_id = launchpad.get_new_fw_id()
    l_id = launchpad.get_new_launch_id()

    spec = {'task_type': task_dict['task_type'], 'run_tags': task_dict['run_tags'],
            'vaspinputset_name': None, 'vasp': None, 'mpsnl': task_dict['snl'],
            'snlgroup_id': task_dict['snlgroup_id']}
    tasks = [DummyLegacyTask()]

    launch_dir = task_dict['dir_name_full']

    stored_data = {'error_list': []}
    update_spec = {'prev_vasp_dir': task_dict['dir_name'],
                   'prev_task_type': spec['task_type'],
                   'mpsnl': spec['mpsnl'], 'snlgroup_id': spec['snlgroup_id'],
                   'run_tags': spec['run_tags']}

    fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)

    if task_dict['completed_at']:
        complete_date = datetime.datetime.strptime(task_dict['completed_at'], "%Y-%m-%d %H:%M:%S")
        state_history = [{"created_on": complete_date, 'state': 'COMPLETED'}]
    else:
        state_history = []

    launches = [Launch('COMPLETED', launch_dir, fworker=None, host=None, ip=None, action=fwaction,
                       state_history=state_history, launch_id=l_id, fw_id=fw_id)]

    f = Composition(task_dict['pretty_formula']).alphabetical_formula


    fw = Firework(tasks, spec, name=get_slug(f + '--' + spec['task_type']),
                  launches=launches, state='COMPLETED', created_on=None, fw_id=fw_id)

    wf_meta = get_meta_from_structure(Structure.from_dict(task_dict['snl']))
    wf_meta['run_version'] = 'preproduction (0)'

    wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)

    launchpad.add_wf(wf, reassign_all=False)
    launchpad._upsert_launch(launches[0])

    print('ADDED', fw_id)
    return fw_id
Ejemplo n.º 59
0
    def from_dict(d):
        a = d["about"]
        dec = MontyDecoder()

        created_at = dec.process_decoded(a["created_at"]) if "created_at" in a \
            else None
        data = {k: v for k, v in d["about"].items()
                if k.startswith("_")}
        data = dec.process_decoded(data)

        structure = Structure.from_dict(d) if "lattice" in d \
            else Molecule.from_dict(d)
        return MPStructureNL(structure, a["authors"],
                             projects=a.get("projects", None),
                             references=a.get("references", ""),
                             remarks=a.get("remarks", None), data=data,
                             history=a.get("history", None),
                             created_at=created_at)
Ejemplo n.º 60
0
def modify_to_soc(original_wf, nbands, structure=None, modify_incar_params=None, fw_name_constraint=None):
    """
    Takes a regular workflow and transforms the VASP fireworks specified by
    fw_name_constraint into non-collinear calculations that take spin-orbit coupling into account.

    Args:
        original_wf (Workflow)
        nbands (int): number of bands selected by the user (for now)
        structure (Structure)
        modify_incar_params ({}): a dictionary containing the settings for modifying the INCAR (e.g. {"ICHARG": 11})
        fw_name_constraint (string): name of the fireworks to be modified (all if None is passed)

    Returns:
        modified Workflow with SOC
    """

    wf_dict = original_wf.to_dict()
    if structure is None:
        try:
            sid = get_fws_and_tasks(original_wf, fw_name_constraint="structure optimization",
                                    task_name_constraint="RunVasp")[0][0]
            structure = Structure.from_dict(wf_dict["fws"][sid]["spec"]["_tasks"][1]["vasp_input_set"]["structure"])
        except Exception:
            raise ValueError("For this workflow, the structure must be provided as an input")
    magmom = ""
    for i in structure:
        magmom += "0 0 0.6 "
    # TODO: add saxis as an input parameter with default being (0 0 1)
    modify_incar_params = modify_incar_params or {"incar_update": {"LSORBIT": "T", "NBANDS": nbands, "MAGMOM": magmom,
                                                    "ISPIN": 1, "LMAXMIX": 4, "ISYM": 0}}

    for idx_fw, idx_t in get_fws_and_tasks(original_wf, fw_name_constraint=fw_name_constraint,
                                           task_name_constraint="RunVasp"):
        if "nscf" in wf_dict["fws"][idx_fw]["name"]:
            wf_dict["fws"][idx_fw]["spec"]["_tasks"][idx_t]["vasp_cmd"] = ">>vasp_ncl<<"
            wf_dict["fws"][idx_fw]["spec"]["_tasks"].insert(idx_t, ModifyIncar(**modify_incar_params).to_dict())

        wf_dict["fws"][idx_fw]["name"] += " soc"

    for idx_fw, idx_t in get_fws_and_tasks(original_wf, fw_name_constraint=fw_name_constraint,
                                           task_name_constraint="RunBoltztrap"):
        wf_dict["fws"][idx_fw]["name"] += " soc"

    return Workflow.from_dict(wf_dict)
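A hedged usage sketch: apply the powerup to an existing band-structure workflow. The preset import, input file name, and nbands value are assumptions:

from pymatgen.core import Structure
from atomate.vasp.workflows.presets.core import wf_bandstructure

struct = Structure.from_file("POSCAR")            # hypothetical input file
wf = wf_bandstructure(struct)
wf_soc = modify_to_soc(wf, nbands=36, structure=struct)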