示例#1
0
    def test_detect_output_file_paths(self):
        """The drone records each gzip'd output file name per calculation,
        keeping the per-run suffix for multi-run relaxations."""
        drone = VaspDrone()

        static_doc = drone.assimilate(self.Si_static)
        expected_static = {
            key: "{}.gz".format(key.upper())
            for key in ("chgcar", "locpot", "aeccar0", "aeccar1",
                        "aeccar2", "procar", "wavecar")
        }
        self.assertDictEqual(
            expected_static,
            static_doc["calcs_reversed"][0]["output_file_paths"],
        )

        relax_doc = drone.assimilate(self.relax2)
        expected_relax = {
            key: "{}.relax1.gz".format(key.upper())
            for key in ("chgcar", "procar", "wavecar")
        }
        self.assertDictEqual(
            expected_relax,
            relax_doc["calcs_reversed"][1]["output_file_paths"],
        )
示例#2
0
 def test_runs_assimilate(self):
     """Two-step relaxation: relax1/relax2 data must land in the expected
     calcs_reversed slots (most recent run first)."""
     drone = VaspDrone(runs=["relax1", "relax2"])
     doc = drone.assimilate(self.relax2)

     final_oszicar = Oszicar(os.path.join(self.relax2, "OSZICAR.relax2.gz"))
     outcars = {}
     run_stats = {}
     for run in ("relax1", "relax2"):
         oc = Outcar(os.path.join(self.relax2,
                                  "OUTCAR.{}.gz".format(run))).as_dict()
         # run_stats is stored separately in the task doc, so strip it here
         run_stats[run] = oc.pop("run_stats")
         outcars[run] = oc

     self.assertEqual(len(doc["calcs_reversed"]), 2)
     self.assertEqual(doc["composition_reduced"], {"Si": 1.0})
     self.assertEqual(doc["composition_unit_cell"], {"Si": 2.0})
     self.assertAlmostEqual(doc["output"]["energy"],
                            final_oszicar.ionic_steps[-1]["E0"])
     self.assertEqual(doc["formula_pretty"], "Si")
     self.assertEqual(doc["formula_anonymous"], "A")

     newest, oldest = doc["calcs_reversed"][0], doc["calcs_reversed"][1]
     self.assertEqual(list(newest["input"].keys()),
                      list(oldest["input"].keys()))
     self.assertEqual(list(newest["output"].keys()),
                      list(oldest["output"].keys()))
     self.assertEqual(newest["output"]["energy"], doc["output"]["energy"])
     self.assertEqual(doc["run_stats"][newest["task"]["name"]],
                      run_stats["relax2"])
     self.assertEqual(doc["run_stats"][oldest["task"]["name"]],
                      run_stats["relax1"])
     self.assertEqual(newest["output"]["outcar"], outcars["relax2"])
     self.assertEqual(oldest["output"]["outcar"], outcars["relax1"])
 def test_runs_assimilate(self):
     """Assimilate a two-run relaxation and verify per-run bookkeeping."""
     drone = VaspDrone(runs=["relax1", "relax2"])
     doc = drone.assimilate(self.relax2)

     oszicar_final = Oszicar(os.path.join(self.relax2, "OSZICAR.relax2.gz"))
     outcar_d1 = Outcar(os.path.join(self.relax2, "OUTCAR.relax1.gz")).as_dict()
     outcar_d2 = Outcar(os.path.join(self.relax2, "OUTCAR.relax2.gz")).as_dict()
     # run_stats is recorded separately in the task doc
     stats1 = outcar_d1.pop("run_stats")
     stats2 = outcar_d2.pop("run_stats")

     calcs = doc["calcs_reversed"]
     self.assertEqual(len(calcs), 2)
     self.assertEqual(doc["composition_reduced"], {"Si": 1.0})
     self.assertEqual(doc["composition_unit_cell"], {"Si": 2.0})
     self.assertAlmostEqual(doc["output"]["energy"],
                            oszicar_final.ionic_steps[-1]["E0"])
     self.assertEqual(doc["formula_pretty"], "Si")
     self.assertEqual(doc["formula_anonymous"], "A")
     # both calcs must expose the same input/output schema
     self.assertEqual(list(calcs[0]["input"]), list(calcs[1]["input"]))
     self.assertEqual(list(calcs[0]["output"]), list(calcs[1]["output"]))
     self.assertEqual(calcs[0]["output"]["energy"], doc["output"]["energy"])
     self.assertEqual(doc["run_stats"][calcs[0]["task"]["name"]], stats2)
     self.assertEqual(doc["run_stats"][calcs[1]["task"]["name"]], stats1)
     self.assertEqual(calcs[0]["output"]["outcar"], outcar_d2)
     self.assertEqual(calcs[1]["output"]["outcar"], outcar_d1)
示例#4
0
    def run_task(self, fw_spec):
        """Parse a VASP molecular-dynamics directory with VaspDrone and
        store the task document in the MD tasks database, or dump it to
        task.json when no db_file is configured.

        Returns a FWAction storing the task_id; child fireworks are
        defused when the run is unsuccessful (unless
        "defuse_unsuccessful" is set falsy).
        """
        # get the directory that contains the VASP dir to parse
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"],
                                    fw_spec["calc_locs"])["path"]

        # parse the VASP directory
        logger.info("PARSING DIRECTORY: {}".format(calc_dir))

        drone = VaspDrone(additional_fields=self.get("additional_fields"),
                          parse_dos=self.get("parse_dos", False),
                          bandstructure_mode=self.get("bandstructure_mode",
                                                      False))

        # assimilate (i.e., parse)
        task_doc = drone.assimilate(calc_dir)

        # Check for additional keys to set based on the fw_spec
        if self.get("fw_spec_field"):
            task_doc.update(fw_spec[self.get("fw_spec_field")])

        # get the database connection
        db_file = env_chk(self.get('db_file'), fw_spec)

        # db insertion or taskdoc dump
        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
        else:
            mmdb = VaspMDCalcDb.from_db_file(db_file, admin=True)

            # prevent duplicate insertion
            # NOTE(review): dedupe key is (formula_pretty, task_label) only —
            # a re-run with the same label silently replaces the old entry
            mmdb.db.tasks.find_one_and_delete({
                'formula_pretty':
                task_doc['formula_pretty'],
                'task_label':
                task_doc['task_label']
            })

            t_id = mmdb.insert_task(
                task_doc,
                parse_dos=self.get("parse_dos", False),
                parse_bs=bool(self.get("bandstructure_mode", False)),
                md_structures=self.get("md_structures", True))

            logger.info("Finished parsing with task_id: {}".format(t_id))

        # defuse downstream fireworks if the parsed run failed
        if self.get("defuse_unsuccessful", True):
            defuse_children = (task_doc["state"] != "successful")
        else:
            defuse_children = False

        return FWAction(stored_data={"task_id": task_doc.get("task_id", None)},
                        defuse_children=defuse_children)
示例#5
0
 def test_parse_chrgcar(self):
     """Parsed CHGCAR/AECCAR grids integrate to the expected total charge."""
     drone = VaspDrone(parse_chgcar=True, parse_aeccar=True)
     doc = drone.assimilate(self.Si_static)
     expected_means = {'chgcar': 8.0,
                       'aeccar0': 23.253588293583313,
                       'aeccar2': 8.01314480789829}
     for key, mean in expected_means.items():
         grid = doc['calcs_reversed'][0][key]
         self.assertAlmostEqual(grid.data['total'].sum() / grid.ngridpts,
                                mean, 4)
示例#6
0
 def test_parse_chrgcar(self):
     """Charge-density files are parsed into objects exposing data/ngridpts."""
     drone = VaspDrone(parse_chgcar=True, parse_aeccar=True)
     calc = drone.assimilate(self.Si_static)['calcs_reversed'][0]

     def mean_density(key):
         # average of the 'total' grid over all grid points
         cc = calc[key]
         return cc.data['total'].sum() / cc.ngridpts

     self.assertAlmostEqual(mean_density('chgcar'), 8.0, 4)
     self.assertAlmostEqual(mean_density('aeccar0'), 23.253588293583313, 4)
     self.assertAlmostEqual(mean_density('aeccar2'), 8.01314480789829, 4)
示例#7
0
    def run_task(self, fw_spec):
        """Parse a VASP directory with VaspDrone and insert the task
        document into the tasks database (or dump it to task.json when no
        db_file is configured).

        Fix: guard the "fizzle" comparison with an isinstance check — the
        previous unconditional ``.lower()`` raised AttributeError whenever
        defuse_unsuccessful was the boolean False.
        """
        # get the directory that contains the VASP dir to parse
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"],
                                    fw_spec["calc_locs"])["path"]

        # parse the VASP directory
        logger.info("PARSING DIRECTORY: {}".format(calc_dir))

        drone = VaspDrone(additional_fields=self.get("additional_fields"),
                          parse_dos=self.get("parse_dos", False),
                          compress_dos=1,
                          bandstructure_mode=self.get("bandstructure_mode",
                                                      False),
                          compress_bs=1)

        # assimilate (i.e., parse)
        task_doc = drone.assimilate(calc_dir)

        # Check for additional keys to set based on the fw_spec
        if self.get("fw_spec_field"):
            task_doc.update(fw_spec[self.get("fw_spec_field")])

        # get the database connection
        db_file = env_chk(self.get('db_file'), fw_spec)

        # db insertion or taskdoc dump
        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
        else:
            mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
            t_id = mmdb.insert_task(task_doc,
                                    parse_dos=self.get("parse_dos", False),
                                    parse_bs=bool(
                                        self.get("bandstructure_mode", False)))
            logger.info("Finished parsing with task_id: {}".format(t_id))

        # decide whether to defuse children on an unsuccessful run; the
        # setting may be True, False, or the string "fizzle"
        defuse_children = False
        if task_doc["state"] != "successful":
            defuse_unsuccessful = self.get("defuse_unsuccessful",
                                           DEFUSE_UNSUCCESSFUL)
            if defuse_unsuccessful is True:
                defuse_children = True
            elif (isinstance(defuse_unsuccessful, str)
                    and defuse_unsuccessful.lower() == "fizzle"):
                raise RuntimeError(
                    "VaspToDb indicates that job is not successful "
                    "(perhaps your job did not converge within the "
                    "limit of electronic/ionic iterations)!")

        return FWAction(stored_data={"task_id": task_doc.get("task_id", None)},
                        defuse_children=defuse_children)
 def test_bandstructure(self):
     """Al is metallic: no band edges and a zero band gap."""
     drone = VaspDrone()
     doc = drone.assimilate(self.Al)
     self.assertEqual(doc["composition_reduced"], {"Al": 1.0})
     self.assertEqual(doc["formula_pretty"], "Al")
     self.assertEqual(doc["formula_anonymous"], "A")
     # band summary is duplicated on the newest calc and at top level
     for out in (doc["calcs_reversed"][0]["output"], doc["output"]):
         self.assertIsNone(out["vbm"])
         self.assertIsNone(out["cbm"])
         self.assertEqual(out["bandgap"], 0.0)
         self.assertFalse(out["is_gap_direct"])
         self.assertTrue(out["is_metal"])
示例#9
0
 def test_bandstructure(self):
     """A metallic run (Al) reports no VBM/CBM and a zero band gap."""
     doc = VaspDrone().assimilate(self.Al)
     self.assertEqual(doc["composition_reduced"], {'Al': 1.0})
     self.assertEqual(doc["formula_pretty"], 'Al')
     self.assertEqual(doc["formula_anonymous"], 'A')
     for section in [doc["calcs_reversed"][0]["output"], doc["output"]]:
         self.assertIsNone(section["vbm"])
         self.assertIsNone(section["cbm"])
         self.assertEqual(section["bandgap"], 0.0)
         self.assertFalse(section["is_gap_direct"])
         self.assertTrue(section["is_metal"])
示例#10
0
    def test_parse_chrgcar(self):
        """Stored chgcar/aeccar entries decode back into charge-density
        objects whose 'total' grid averages to the expected value."""
        drone = VaspDrone(parse_chgcar=True, parse_aeccar=True)
        doc = drone.assimilate(self.Si_static)
        calc = doc["calcs_reversed"][0]

        for key, expected_mean in (("chgcar", 8.0),
                                   ("aeccar0", 23.253588293583313),
                                   ("aeccar2", 8.01314480789829)):
            cc = decoder.process_decoded(calc[key])
            self.assertAlmostEqual(
                cc.data["total"].sum() / cc.ngridpts, expected_mean, 4)
示例#11
0
 def test_assimilate(self):
     """Spot-check the main fields of an assimilated relaxation doc.

     Only the main changes from the vasprun as-dict format and the schema
     currently used in pymatgen-db are tested for now.
     """
     doc = VaspDrone().assimilate(self.relax)
     self.assertEqual(doc["composition_reduced"], {"Si": 1.0})
     self.assertEqual(doc["composition_unit_cell"], {"Si": 2.0})
     self.assertAlmostEqual(doc["output"]["energy"], -10.84671647)
     self.assertEqual(doc["formula_pretty"], "Si")
     self.assertEqual(doc["formula_anonymous"], "A")
     self.assertEqual(doc["calcs_reversed"][0]["output"]["energy"],
                      doc["output"]["energy"])
     self.assertEqual(doc["input"]["parameters"]["ISMEAR"], -5)
    def run_task(self, fw_spec):
        """Fit an elastic tensor from the stresses of the deformation tasks
        in fw_spec and store the analysis in the "elasticity" collection
        (or in elasticity.json when no db_file is configured).

        Raises:
            ValueError: if fewer than 6 unique deformation types are found.
        """
        # Get optimized structure
        # TODO: will this find the correct path if the workflow is rerun from the start?
        optimize_loc = fw_spec["calc_locs"][0]["path"]
        logger.info("PARSING INITIAL OPTIMIZATION DIRECTORY: {}".format(optimize_loc))
        drone = VaspDrone()
        optimize_doc = drone.assimilate(optimize_loc)
        opt_struct = Structure.from_dict(
            optimize_doc["calcs_reversed"][0]["output"]["structure"])

        d = {"analysis": {}, "deformation_tasks": fw_spec["deformation_tasks"],
             "initial_structure": self['structure'].as_dict(),
             "optimized_structure": opt_struct.as_dict()}
        if fw_spec.get("tags"):
            d["tags"] = fw_spec["tags"]

        # build the strain -> stress mapping used for the fit
        dtypes = fw_spec["deformation_tasks"].keys()
        defos = [fw_spec["deformation_tasks"][dtype]["deformation_matrix"]
                 for dtype in dtypes]
        stresses = [fw_spec["deformation_tasks"][dtype]["stress"]
                    for dtype in dtypes]
        stress_dict = {IndependentStrain(defo): Stress(stress)
                       for defo, stress in zip(defos, stresses)}

        logger.info("ANALYZING STRESS/STRAIN DATA")
        # determine whether we have 6 "unique" deformations (keyed by the
        # first three characters of each deformation-task name)
        if len({de[:3] for de in dtypes}) == 6:
            # Perform Elastic tensor fitting and analysis
            result = ElasticTensor.from_stress_dict(stress_dict)
            d["elastic_tensor"] = result.voigt.tolist()
            kg_average = result.kg_average
            d.update({"K_Voigt": kg_average[0], "G_Voigt": kg_average[1],
                      "K_Reuss": kg_average[2], "G_Reuss": kg_average[3],
                      "K_Voigt_Reuss_Hill": kg_average[4],
                      "G_Voigt_Reuss_Hill": kg_average[5]})
            d["universal_anisotropy"] = result.universal_anisotropy
            d["homogeneous_poisson"] = result.homogeneous_poisson
        else:
            raise ValueError("Fewer than 6 unique deformations")

        d["state"] = "successful"

        # Save analysis results in json or db
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            db = MMVaspDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(d)
            logger.info("ELASTIC ANALYSIS COMPLETE")
        return FWAction()
示例#13
0
    def test_parse_locpot(self):
        """parse_locpot=True stores planar-averaged LOCPOT data along all
        three lattice directions, each averaging to zero."""
        drone = VaspDrone(parse_locpot=True)
        doc = drone.assimilate(self.Si_static)

        self.assertTrue(drone.parse_locpot)
        out = doc['calcs_reversed'][0]['output']
        self.assertIn('locpot', out)
        for axis in (0, 1, 2):
            self.assertIn(axis, out['locpot'])
        for axis in (0, 1, 2):
            self.assertAlmostEqual(np.sum(out['locpot'][axis]), 0)
示例#14
0
 def test_assimilate(self):
     """Basic sanity of the assimilated task document for a single relax."""
     drone = VaspDrone()
     doc = drone.assimilate(self.relax)
     # Only the main deviations from the raw vasprun dict and the
     # pymatgen-db schema are exercised here.
     for field, value in [("composition_reduced", {'Si': 1.0}),
                          ("composition_unit_cell", {'Si': 2.0}),
                          ("formula_pretty", 'Si'),
                          ("formula_anonymous", 'A')]:
         self.assertEqual(doc[field], value)
     self.assertAlmostEqual(doc["output"]["energy"], -10.84671647)
     self.assertEqual(doc["calcs_reversed"][0]["output"]["energy"],
                      doc["output"]["energy"])
     self.assertEqual(doc["input"]["parameters"]["ISMEAR"], -5)
示例#15
0
    def test_bandstructure(self):
        """Band-edge data for semiconducting Si (relax + static) and
        metallic Al.  A bandstructure object is stored only for the static
        Si and the Al calculations, not for the relaxation."""
        drone = VaspDrone()

        def check_semiconductor(doc, vbm, cbm, gap, direct_gap):
            # the same band summary appears on the calc and at top level
            for d in [doc["calcs_reversed"][0]["output"], doc["output"]]:
                self.assertAlmostEqual(d["vbm"], vbm)
                self.assertAlmostEqual(d["cbm"], cbm)
                self.assertAlmostEqual(d["bandgap"], gap)
                self.assertFalse(d["is_gap_direct"])
                self.assertFalse(d["is_metal"])
                self.assertNotIn("transition", d)
                self.assertAlmostEqual(d["direct_gap"], direct_gap)

        doc = drone.assimilate(self.relax2)
        self.assertEqual(doc["composition_reduced"], {"Si": 1.0})
        self.assertEqual(doc["formula_pretty"], "Si")
        self.assertEqual(doc["formula_anonymous"], "A")
        check_semiconductor(doc, 5.6147, 6.2652, 0.6505, 2.5561)
        self.assertNotIn("bandstructure", doc["calcs_reversed"][0])

        doc = drone.assimilate(self.Si_static)
        self.assertEqual(doc["composition_reduced"], {"Si": 1.0})
        self.assertEqual(doc["formula_pretty"], "Si")
        self.assertEqual(doc["formula_anonymous"], "A")
        check_semiconductor(doc, 5.6138, 6.2644, 0.6506, 2.5563)
        self.assertIn("bandstructure", doc["calcs_reversed"][0])
        band_props = doc["calcs_reversed"][0]["output"][
            "eigenvalue_band_properties"]
        self.assertAlmostEqual(band_props["bandgap"], 0.6505999999999998)
        self.assertAlmostEqual(band_props["cbm"], 6.2644)
        self.assertAlmostEqual(band_props["vbm"], 5.6138)
        self.assertFalse(band_props["is_gap_direct"])

        doc = drone.assimilate(self.Al)
        self.assertEqual(doc["composition_reduced"], {"Al": 1.0})
        self.assertEqual(doc["formula_pretty"], "Al")
        self.assertEqual(doc["formula_anonymous"], "A")
        for d in [doc["calcs_reversed"][0]["output"], doc["output"]]:
            self.assertIsNone(d["vbm"])
            self.assertIsNone(d["cbm"])
            self.assertEqual(d["bandgap"], 0.0)
            self.assertFalse(d["is_gap_direct"])
            self.assertTrue(d["is_metal"])
        self.assertEqual(
            doc["calcs_reversed"][0]["bandstructure"]["@class"],
            "BandStructureSymmLine",
        )
示例#16
0
 def test_assimilate(self):
     """Assimilated relaxation doc: composition, energetics, forces, stress."""
     drone = VaspDrone()
     doc = drone.assimilate(self.relax)
     # Only the main deviations from the vasprun as-dict format and the
     # current pymatgen-db schema are checked.
     self.assertEqual(doc["composition_reduced"], {'Si': 1.0})
     self.assertEqual(doc["composition_unit_cell"], {'Si': 2.0})
     out = doc["output"]
     self.assertAlmostEqual(out["energy"], -10.84671647)
     self.assertTrue(np.allclose(out["forces"], [[0, 0, 0], [0, 0, 0]]))
     self.assertAlmostEqual(out['stress'][0][0], -0.08173155)
     self.assertEqual(doc["formula_pretty"], 'Si')
     self.assertEqual(doc["formula_anonymous"], 'A')
     self.assertEqual(doc["calcs_reversed"][0]["output"]["energy"],
                      out["energy"])
     self.assertEqual(doc["input"]["parameters"]["ISMEAR"], -5)
示例#17
0
 def test_parse_optical(self):
     """Dielectric function and absorption coefficient are parsed and
     share a common energy grid."""
     doc = VaspDrone().assimilate(self.optics)
     dielectric = doc["output"]["dielectric"]
     for part in ("energy", "real", "imag"):
         self.assertIsNotNone(dielectric[part])
     n_energies = len(dielectric["energy"])
     self.assertEqual(n_energies, len(dielectric["imag"]))
     self.assertEqual(n_energies,
                      len(doc["output"]["optical_absorption_coeff"]))
示例#18
0
    def test_parse_locpot(self):
        """With parse_locpot=True the output carries planar-averaged
        potentials for all three lattice directions, each summing to ~0."""
        drone = VaspDrone(parse_locpot=True)
        doc = drone.assimilate(self.Si_static)

        self.assertTrue(drone.parse_locpot)
        output = doc["calcs_reversed"][0]["output"]
        self.assertIn("locpot", output)
        for direction in range(3):
            self.assertIn(direction, output["locpot"])
            self.assertAlmostEqual(np.sum(output["locpot"][direction]), 0)
示例#19
0
 def test_assimilate(self):
     """End-to-end assimilation of a relaxation run.

     Only the main changes from the vasprun as-dict format and the schema
     currently used in pymatgen-db are tested for now.
     """
     drone = VaspDrone()
     task = drone.assimilate(self.relax)
     self.assertEqual(task["composition_reduced"], {"Si": 1.0})
     self.assertEqual(task["composition_unit_cell"], {"Si": 2.0})
     self.assertAlmostEqual(task["output"]["energy"], -10.84671647)
     self.assertTrue(np.allclose(task["output"]["forces"],
                                 [[0, 0, 0], [0, 0, 0]]))
     self.assertAlmostEqual(task["output"]["stress"][0][0], -0.08173155)
     self.assertEqual(task["formula_pretty"], "Si")
     self.assertEqual(task["formula_anonymous"], "A")
     self.assertEqual(task["calcs_reversed"][0]["output"]["energy"],
                      task["output"]["energy"])
     self.assertEqual(task["input"]["parameters"]["ISMEAR"], -5)
示例#20
0
    def test_detect_output_file_paths(self):
        """Output file paths are recorded per calc, keeping the run suffix
        for multi-run relaxations."""
        drone = VaspDrone()

        doc = drone.assimilate(self.Si_static)
        static_paths = doc['calcs_reversed'][0]['output_file_paths']
        self.assertDictEqual(static_paths, {
            'chgcar': 'CHGCAR.gz',
            'locpot': 'LOCPOT.gz',
            'aeccar0': 'AECCAR0.gz',
            'aeccar1': 'AECCAR1.gz',
            'aeccar2': 'AECCAR2.gz',
            'procar': 'PROCAR.gz',
            'wavecar': 'WAVECAR.gz',
        })

        doc = drone.assimilate(self.relax2)
        relax1_paths = doc['calcs_reversed'][1]['output_file_paths']
        self.assertDictEqual(relax1_paths, {
            'chgcar': 'CHGCAR.relax1.gz',
            'procar': 'PROCAR.relax1.gz',
            'wavecar': 'WAVECAR.relax1.gz',
        })
示例#21
0
    def test_bandstructure(self):
        """Band edges for the Si relaxation and static run, plus metallic Al."""
        drone = VaspDrone()

        # (fixture, vbm, cbm, gap, direct gap, bandstructure stored?)
        si_cases = [
            (self.relax2, 5.6147, 6.2652, 0.6505, 2.5561, False),
            (self.Si_static, 5.6138, 6.2644, 0.6506, 2.5563, True),
        ]
        for path, vbm, cbm, gap, direct_gap, has_bs in si_cases:
            doc = drone.assimilate(path)
            self.assertEqual(doc["composition_reduced"], {'Si': 1.0})
            self.assertEqual(doc["formula_pretty"], 'Si')
            self.assertEqual(doc["formula_anonymous"], 'A')
            for d in (doc["calcs_reversed"][0]["output"], doc["output"]):
                self.assertAlmostEqual(d["vbm"], vbm)
                self.assertAlmostEqual(d["cbm"], cbm)
                self.assertAlmostEqual(d["bandgap"], gap)
                self.assertFalse(d["is_gap_direct"])
                self.assertFalse(d["is_metal"])
                self.assertNotIn("transition", d)
                self.assertAlmostEqual(d["direct_gap"], direct_gap)
            if has_bs:
                self.assertIn("bandstructure", doc["calcs_reversed"][0])
            else:
                self.assertNotIn("bandstructure", doc["calcs_reversed"][0])

        # Metallic Al: no band edges, zero gap, bandstructure serialized
        doc = drone.assimilate(self.Al)
        self.assertEqual(doc["composition_reduced"], {'Al': 1.0})
        self.assertEqual(doc["formula_pretty"], 'Al')
        self.assertEqual(doc["formula_anonymous"], 'A')
        for d in (doc["calcs_reversed"][0]["output"], doc["output"]):
            self.assertIsNone(d["vbm"])
            self.assertIsNone(d["cbm"])
            self.assertEqual(d["bandgap"], 0.0)
            self.assertFalse(d["is_gap_direct"])
            self.assertTrue(d["is_metal"])
        self.assertEqual(doc["calcs_reversed"][0]["bandstructure"]["@class"],
                         "BandStructureSymmLine")
示例#22
0
    def test_bandstructure(self):
        """Band-edge data for a Si relaxation, a Si static run, and Al."""
        drone = VaspDrone()

        def band_outputs(doc):
            # the same band summary appears on the newest calc and top level
            return [doc["calcs_reversed"][0]["output"], doc["output"]]

        # Si relaxation: gap present, no bandstructure object stored
        doc = drone.assimilate(self.relax2)
        self.assertEqual(doc["composition_reduced"], {'Si': 1.0})
        self.assertEqual(doc["formula_pretty"], 'Si')
        self.assertEqual(doc["formula_anonymous"], 'A')
        for out in band_outputs(doc):
            self.assertAlmostEqual(out["vbm"], 5.6147)
            self.assertAlmostEqual(out["cbm"], 6.2652)
            self.assertAlmostEqual(out["bandgap"], 0.6505)
            self.assertFalse(out["is_gap_direct"])
            self.assertFalse(out["is_metal"])
        self.assertNotIn("bandstructure", doc["calcs_reversed"][0])

        # Si static: the bandstructure is stored on the calc
        doc = drone.assimilate(self.Si_static)
        self.assertEqual(doc["composition_reduced"], {'Si': 1.0})
        self.assertEqual(doc["formula_pretty"], 'Si')
        self.assertEqual(doc["formula_anonymous"], 'A')
        for out in band_outputs(doc):
            self.assertAlmostEqual(out["vbm"], 5.6138)
            self.assertAlmostEqual(out["cbm"], 6.2644)
            self.assertAlmostEqual(out["bandgap"], 0.6506)
            self.assertFalse(out["is_gap_direct"])
            self.assertFalse(out["is_metal"])
        self.assertIn("bandstructure", doc["calcs_reversed"][0])

        # Al: metallic, serialized line-mode bandstructure
        doc = drone.assimilate(self.Al)
        self.assertEqual(doc["composition_reduced"], {'Al': 1.0})
        self.assertEqual(doc["formula_pretty"], 'Al')
        self.assertEqual(doc["formula_anonymous"], 'A')
        for out in band_outputs(doc):
            self.assertIsNone(out["vbm"])
            self.assertIsNone(out["cbm"])
            self.assertEqual(out["bandgap"], 0.0)
            self.assertFalse(out["is_gap_direct"])
            self.assertTrue(out["is_metal"])
        self.assertEqual(doc["calcs_reversed"][0]["bandstructure"]["@class"],
                         "BandStructureSymmLine")
示例#23
0
    def test_detect_output_file_paths(self):
        """The drone maps each output file type to its gzip'd file name."""
        drone = VaspDrone()

        doc = drone.assimilate(self.Si_static)
        expected = {name: name.upper() + '.gz'
                    for name in ('chgcar', 'locpot', 'aeccar0', 'aeccar1',
                                 'aeccar2', 'procar', 'wavecar')}
        self.assertDictEqual(expected,
                             doc['calcs_reversed'][0]['output_file_paths'])

        doc = drone.assimilate(self.relax2)
        expected = {name: name.upper() + '.relax1.gz'
                    for name in ('chgcar', 'procar', 'wavecar')}
        self.assertDictEqual(expected,
                             doc['calcs_reversed'][1]['output_file_paths'])
示例#24
0
    def test_parse_potcar(self):
        """POTCAR hashes are recorded by default and skipped when
        parse_potcar_file=False."""
        # by default, POTCAR should be loaded and hashed
        doc = VaspDrone().assimilate(self.Si_static)
        first_spec = doc["calcs_reversed"][0]["input"]["potcar_spec"][0]
        self.assertIsNotNone(first_spec["hash"])  # check a hash was loaded

        # Force not loading of POTCAR
        doc = VaspDrone(parse_potcar_file=False).assimilate(self.Si_static)
        first_spec = doc["calcs_reversed"][0]["input"]["potcar_spec"][0]
        self.assertIsNone(first_spec["hash"])  # check a hash was not loaded
示例#25
0
    def run_task(self, fw_spec):
        """Parse a VASP directory and insert the task document into the
        tasks database (or dump it to task.json when no db_file is set).

        Optionally defuses child fireworks on an unsuccessful run, and can
        push selected task-document fields into the downstream spec via the
        "task_fields_to_push" option.

        Fixes:
        - the braces in the "{key: path}" example of the type-error message
          are now escaped; previously str.format treated them as a
          replacement field and raised KeyError('key') instead of the
          intended RuntimeError.
        - logger.warn (deprecated alias) replaced by logger.warning.
        """
        # get the directory that contains the VASP dir to parse
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"],
                                    fw_spec["calc_locs"])["path"]

        # parse the VASP directory
        logger.info("PARSING DIRECTORY: {}".format(calc_dir))

        drone = VaspDrone(additional_fields=self.get("additional_fields"),
                          parse_dos=self.get("parse_dos", False),
                          bandstructure_mode=self.get("bandstructure_mode",
                                                      False),
                          parse_chgcar=self.get("parse_chgcar", False),
                          parse_aeccar=self.get("parse_aeccar", False))

        # assimilate (i.e., parse)
        task_doc = drone.assimilate(calc_dir)

        # Check for additional keys to set based on the fw_spec
        if self.get("fw_spec_field"):
            task_doc.update(fw_spec[self.get("fw_spec_field")])

        # get the database connection
        db_file = env_chk(self.get('db_file'), fw_spec)

        # db insertion or taskdoc dump
        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
        else:
            mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
            # gridfs is only needed when large objects (DOS, bandstructure,
            # volumetric data) were parsed
            t_id = mmdb.insert_task(
                task_doc,
                use_gridfs=self.get("parse_dos", False)
                or bool(self.get("bandstructure_mode", False))
                or self.get("parse_chgcar", False)
                or self.get("parse_aeccar", False))
            logger.info("Finished parsing with task_id: {}".format(t_id))

        # the defuse_unsuccessful setting may be True, False, or "fizzle"
        defuse_children = False
        if task_doc["state"] != "successful":
            defuse_unsuccessful = self.get("defuse_unsuccessful",
                                           DEFUSE_UNSUCCESSFUL)
            if defuse_unsuccessful is True:
                defuse_children = True
            elif defuse_unsuccessful is False:
                pass
            elif defuse_unsuccessful == "fizzle":
                raise RuntimeError(
                    "VaspToDb indicates that job is not successful "
                    "(perhaps your job did not converge within the "
                    "limit of electronic/ionic iterations)!")
            else:
                raise RuntimeError("Unknown option for defuse_unsuccessful: "
                                   "{}".format(defuse_unsuccessful))

        # optionally copy fields from the task document into the spec of
        # downstream fireworks
        task_fields_to_push = self.get("task_fields_to_push", None)
        update_spec = {}
        if task_fields_to_push:
            if isinstance(task_fields_to_push, dict):
                for key, path_in_task_doc in task_fields_to_push.items():
                    if has(task_doc, path_in_task_doc):
                        update_spec[key] = get(task_doc, path_in_task_doc)
                    else:
                        logger.warning(
                            "Could not find {} in task document. Unable to push to next firetask/firework"
                            .format(path_in_task_doc))
            else:
                # NOTE: "{{key: path}}" is brace-escaped so str.format emits
                # a literal "{key: path}" instead of raising KeyError
                raise RuntimeError(
                    "Inappropriate type {} for task_fields_to_push. It must be a "
                    "dictionary of format: {{key: path}} where key refers to a field "
                    "in the spec and path is a full mongo-style path to a "
                    "field in the task document".format(
                        type(task_fields_to_push)))

        return FWAction(stored_data={"task_id": task_doc.get("task_id", None)},
                        defuse_children=defuse_children,
                        update_spec=update_spec)
示例#26
0
    def run_task(self, fw_spec):
        """Fit an elastic tensor (or tensor expansion) from deformation-task
        stresses/strains in the spec and store the analysis in the
        ``elasticity`` collection, or in ``elasticity.json`` when no
        ``db_file`` is configured.
        """
        reference_structure = self['structure']
        summary = {
            "analysis": {},
            "initial_structure": self['structure'].as_dict(),
        }

        # Prefer the relaxed structure / equilibrium stress from the most
        # recent optimization calculation, when one exists in calc_locs.
        opt_locs = [
            loc for loc in fw_spec.get('calc_locs', [])
            if 'optimiz' in loc['name']
        ]
        if not opt_locs:
            equilibrium_stress = None
        else:
            opt_path = opt_locs[-1]['path']
            logger.info(
                "Parsing initial optimization directory: {}".format(opt_path))
            opt_doc = VaspDrone().assimilate(opt_path)
            last_output = opt_doc["calcs_reversed"][0]["output"]
            relaxed = Structure.from_dict(last_output["structure"])
            summary["optimized_structure"] = relaxed.as_dict()
            reference_structure = relaxed
            # -0.1 factor: kBar (VASP sign convention) -> GPa
            equilibrium_stress = -0.1 * Stress(
                last_output["ionic_steps"][-1]["stress"])

        if self.get("fw_spec_field"):
            summary[self.get("fw_spec_field")] = fw_spec.get(
                self.get("fw_spec_field"))

        # Collect stresses/strains/deformations, expanding each task by any
        # symmetry operations it carries.
        stresses, strains, deformations = [], [], []
        for task in fw_spec["deformation_tasks"].values():
            base_stress = Stress(task["stress"])
            base_strain = Strain(task["strain"])
            base_defo = Deformation(task["deformation_matrix"])
            stresses.append(base_stress)
            strains.append(base_strain)
            deformations.append(base_defo)
            for op in task.get("symmops", []):
                stresses.append(base_stress.transform(op))
                strains.append(base_strain.transform(op))
                deformations.append(base_defo.transform(op))

        stresses = [-0.1 * s for s in stresses]  # kBar -> GPa
        pk_stresses = [
            s.piola_kirchoff_2(f) for s, f in zip(stresses, deformations)
        ]

        summary['fitting_data'] = {
            'cauchy_stresses': stresses,
            'eq_stress': equilibrium_stress,
            'strains': strains,
            'pk_stresses': pk_stresses,
            'deformations': deformations,
        }

        logger.info("Analyzing stress/strain data")
        # TODO: @montoyjh: what if it's a cubic system? don't need 6. -computron
        # TODO: Can add population method but want to think about how it should
        #           be done. -montoyjh
        order = self.get('order', 2)
        # Orders above 2 are only supported via finite differences.
        method = ('finite_difference' if order > 2
                  else self.get('fitting_method', 'finite_difference'))

        if method == 'finite_difference':
            fitted = ElasticTensorExpansion.from_diff_fit(
                strains, pk_stresses, eq_stress=equilibrium_stress,
                order=order)
            if order == 2:
                fitted = ElasticTensor(fitted[0])
        elif method == 'pseudoinverse':
            fitted = ElasticTensor.from_pseudoinverse(strains, pk_stresses)
        elif method == 'independent':
            fitted = ElasticTensor.from_independent_strains(
                strains, pk_stresses, eq_stress=equilibrium_stress)
        else:
            raise ValueError(
                "Unsupported method, method must be finite_difference, "
                "pseudoinverse, or independent")

        ieee = fitted.convert_to_ieee(reference_structure)
        summary["elastic_tensor"] = {
            "raw": fitted.voigt,
            "ieee_format": ieee.voigt,
        }
        if order == 2:
            props_tensor = ieee
        else:
            # Derive second-order properties from the leading tensor term.
            props_tensor = ElasticTensor(ieee[0])
        summary["derived_properties"] = props_tensor.get_structure_property_dict(
            reference_structure)

        summary["formula_pretty"] = reference_structure.composition.reduced_formula
        summary["fitting_method"] = method
        summary["order"] = order

        summary = jsanitize(summary)

        # Persist: JSON file when no db_file is configured, otherwise MongoDB.
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(summary, default=DATETIME_HANDLER))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(summary)
            logger.info("Elastic analysis complete.")

        return FWAction()
    def run_task(self, fw_spec):
        """Parse a completed VASP directory into a task document and store it,
        either in MongoDB (offloading DOS / band structure to GridFS) or as a
        local ``task.json`` when no ``db_file`` is configured.
        """
        # Resolve the calculation directory: explicit "calc_dir" wins, then a
        # named "calc_loc", otherwise the current working directory.
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"], fw_spec["calc_locs"])["path"]

        logger.info("PARSING DIRECTORY: {}".format(calc_dir))
        db_file = env_chk(self.get('db_file'), fw_spec)

        drone = VaspDrone(
            additional_fields=self.get("additional_fields"),
            parse_dos=self.get("parse_dos", False), compress_dos=1,
            bandstructure_mode=self.get("bandstructure_mode", False),
            compress_bs=1)
        task_doc = drone.assimilate(calc_dir)

        # Merge in any extra fields requested from the fw_spec.
        if self.get("fw_spec_field"):
            task_doc.update(fw_spec[self.get("fw_spec_field")])

        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
        else:
            mmdb = MMVaspDb.from_db_file(db_file, admin=True)

            def _offload_to_gridfs(field, fs_name):
                # Move `field` out of every calc; only the most recent calc
                # (index 0) is actually stored in GridFS, the rest are dropped.
                for idx, calc in enumerate(task_doc["calcs_reversed"]):
                    if field in calc:
                        if idx == 0:
                            blob = json.dumps(calc[field], cls=MontyEncoder)
                            gfs_id, compression = mmdb.insert_gridfs(
                                blob, fs_name)
                            calc[field + "_compression"] = compression
                            calc[field + "_fs_id"] = gfs_id
                        del calc[field]

            if self.get("parse_dos") and "calcs_reversed" in task_doc:
                _offload_to_gridfs("dos", "dos_fs")
            if self.get("bandstructure_mode") and "calcs_reversed" in task_doc:
                _offload_to_gridfs("bandstructure", "bandstructure_fs")

            t_id = mmdb.insert(task_doc)
            logger.info("Finished parsing with task_id: {}".format(t_id))

        # Defuse downstream fireworks on failure unless explicitly disabled.
        if self.get("defuse_unsuccessful", True):
            defuse_children = (task_doc["state"] != "successful")
        else:
            defuse_children = False

        return FWAction(stored_data={"task_id": task_doc.get("task_id", None)},
                        defuse_children=defuse_children)
示例#28
0
File: utils.py  Project: nwinner/emmet
def parse_vasp_dirs(vaspdirs, tag, task_ids):
    """Worker entry point: parse a chunk of VASP directories with VaspDrone
    and insert the resulting task documents into the target calc database.

    Args:
        vaspdirs: list of VASP launcher directory paths for this worker.
        tag: tag attached to each parsed task (combined with the year tag).
        task_ids: either a dict mapping launcher subdir -> task_id, or a
            list of per-chunk task_id lists indexed by this worker's chunk.

    Returns:
        int: number of directories successfully processed (or that would
        have been processed in dry-run mode).
    """
    process = multiprocessing.current_process()
    name = process.name
    # Worker processes are named "...-<N>"; derive a 0-based chunk index.
    chunk_idx = int(name.rsplit("-")[1]) - 1
    logger.info(f"{name} starting.")
    tags = [tag, SETTINGS.year_tags[-1]]
    # Run configuration comes from the enclosing click CLI context.
    ctx = click.get_current_context()
    spec_or_dbfile = ctx.parent.parent.params["spec_or_dbfile"]
    target = calcdb_from_mgrant(spec_or_dbfile)
    sbxn = list(filter(None, target.collection.distinct("sbxn")))
    logger.info(f"Using sandboxes {sbxn}.")
    no_dupe_check = ctx.parent.parent.params["no_dupe_check"]
    run = ctx.parent.parent.params["run"]  # False -> dry run (no writes)
    projection = {"tags": 1, "task_id": 1}
    count = 0
    drone = VaspDrone(
        additional_fields={"tags": tags},
        store_volumetric_data=ctx.params['store_volumetric_data'])

    for vaspdir in vaspdirs:
        logger.info(f"{name} VaspDir: {vaspdir}")
        launcher = get_subdir(vaspdir)
        # Most recent previously parsed task for this launcher, if any.
        query = {"dir_name": {"$regex": launcher}}
        docs = list(
            target.collection.find(query,
                                   projection).sort([("_id", -1)]).limit(1))

        if docs:
            if no_dupe_check:
                logger.warning(f"FORCING re-parse of {launcher}!")
            else:
                # Already parsed: remove (or report) the directory and skip.
                if run:
                    shutil.rmtree(vaspdir)
                    logger.warning(
                        f"{name} {launcher} already parsed -> removed.")
                else:
                    logger.warning(
                        f"{name} {launcher} already parsed -> would remove.")
                continue

        try:
            task_doc = drone.assimilate(vaspdir)
        except Exception as ex:
            logger.error(f"Failed to assimilate {vaspdir}: {ex}")
            continue
        task_doc["sbxn"] = sbxn
        # task_ids is either {launcher: task_id} or a per-chunk list of ids
        # consumed sequentially via `count`.
        manual_taskid = isinstance(task_ids, dict)
        task_id = task_ids[launcher] if manual_taskid else task_ids[chunk_idx][
            count]
        task_doc["task_id"] = task_id
        logger.info(f"Using {task_id} for {launcher}.")

        if docs:
            # make sure that task gets the same tags as the previously parsed task
            if docs[0]["tags"]:
                task_doc["tags"] += docs[0]["tags"]
                logger.info(
                    f"Adding existing tags {docs[0]['tags']} to {tags}.")

        if run:
            if task_doc["state"] == "successful":
                if docs and no_dupe_check:
                    target.collection.remove({"task_id": task_id})
                    logger.warning(
                        f"Removed previously parsed task {task_id}!")

                try:
                    target.insert_task(task_doc, use_gridfs=True)
                except DocumentTooLarge:
                    # Document exceeds MongoDB's 16 MB limit: drop known
                    # large output fields one at a time and retry the insert.
                    output = dotty(task_doc["calcs_reversed"][0]["output"])
                    pop_keys = [
                        "normalmode_eigenvecs", "force_constants",
                        "outcar.onsite_density_matrices"
                    ]

                    for k in pop_keys:
                        if k not in output:
                            continue

                        logger.warning(f"{name} Remove {k} and retry ...")
                        output.pop(k)
                        try:
                            target.insert_task(task_doc, use_gridfs=True)
                            break
                        except DocumentTooLarge:
                            continue
                    else:
                        # All removable keys exhausted; skip this directory.
                        logger.warning(
                            f"{name} failed to reduce document size")
                        continue

                if target.collection.count(query):
                    # Insert confirmed in the collection; clean up the source.
                    shutil.rmtree(vaspdir)
                    logger.info(
                        f"{name} Successfully parsed and removed {launcher}.")
                    count += 1
        else:
            count += 1

    return count
示例#29
0
File: utils.py  Project: rkingsbury/emmet
def parse_vasp_dirs(vaspdirs, tag, task_ids, snl_metas):  # noqa: C901
    """Worker entry point: parse a chunk of VASP directories and insert the
    resulting task documents (and optional SNLs) into the target database.

    Args:
        vaspdirs: list of VASP launcher directory paths for this worker.
        tag: tag attached to each parsed task (combined with the year tag).
        task_ids: either a dict mapping launcher subdir -> task_id, or a
            list of per-chunk task_id lists indexed by this worker's chunk.
        snl_metas: optional dict mapping launcher subdir -> SNL metadata
            (snl_id, authors, references) used to build a StructureNL.

    Returns:
        int: number of directories successfully processed (or that would
        have been processed in dry-run mode).
    """
    process = multiprocessing.current_process()
    name = process.name
    # Worker processes are named "...-<N>"; derive a 0-based chunk index.
    chunk_idx = int(name.rsplit("-")[1]) - 1
    logger.info(f"{name} starting.")
    tags = [tag, SETTINGS.year_tags[-1]]
    # Run configuration comes from the enclosing click CLI context.
    ctx = click.get_current_context()
    spec_or_dbfile = ctx.parent.parent.params["spec_or_dbfile"]
    target = calcdb_from_mgrant(spec_or_dbfile)
    snl_collection = target.db.snls_user
    sbxn = list(filter(None, target.collection.distinct("sbxn")))
    logger.info(f"Using sandboxes {sbxn}.")
    no_dupe_check = ctx.parent.parent.params["no_dupe_check"]
    run = ctx.parent.parent.params["run"]  # False -> dry run (no writes)
    projection = {"tags": 1, "task_id": 1}
    count = 0
    drone = VaspDrone(
        additional_fields={"tags": tags},
        store_volumetric_data=ctx.params["store_volumetric_data"],
    )

    for vaspdir in vaspdirs:
        logger.info(f"{name} VaspDir: {vaspdir}")
        launcher = get_subdir(vaspdir)
        # Most recent previously parsed task for this launcher, if any.
        query = {"dir_name": {"$regex": launcher}}
        docs = list(
            target.collection.find(query,
                                   projection).sort([("_id", -1)]).limit(1))

        if docs:
            if no_dupe_check:
                logger.warning(f"FORCING re-parse of {launcher}!")
            else:
                # Already parsed: remove (or report) the directory and skip.
                if run:
                    shutil.rmtree(vaspdir)
                    logger.warning(
                        f"{name} {launcher} already parsed -> removed.")
                else:
                    logger.warning(
                        f"{name} {launcher} already parsed -> would remove.")
                continue

        try:
            task_doc = drone.assimilate(vaspdir)
        except Exception as ex:
            logger.error(f"Failed to assimilate {vaspdir}: {ex}")
            continue

        task_doc["sbxn"] = sbxn
        # task_ids is either {launcher: task_id} or a per-chunk list of ids
        # consumed sequentially via `count`.
        manual_taskid = isinstance(task_ids, dict)
        snl_metas_avail = isinstance(snl_metas, dict)
        task_id = task_ids[launcher] if manual_taskid else task_ids[chunk_idx][
            count]
        task_doc["task_id"] = task_id
        logger.info(f"Using {task_id} for {launcher}.")

        if docs:
            # make sure that task gets the same tags as the previously parsed task
            # (run through set to implicitly remove duplicate tags)
            if docs[0]["tags"]:
                existing_tags = list(set(docs[0]["tags"]))
                task_doc["tags"] += existing_tags
                logger.info(f"Adding existing tags {existing_tags} to {tags}.")

        snl_dct = None
        if snl_metas_avail:
            snl_meta = snl_metas.get(launcher)
            if snl_meta:
                references = snl_meta.get("references")
                # NOTE(review): the default author email below appears to have
                # been redacted ("*****") by the code-hosting scraper -- confirm
                # the intended address before relying on this default.
                authors = snl_meta.get(
                    "authors",
                    ["Materials Project <*****@*****.**>"])
                kwargs = {"projects": [tag]}
                if references:
                    kwargs["references"] = references

                struct = Structure.from_dict(task_doc["input"]["structure"])
                snl = StructureNL(struct, authors, **kwargs)
                snl_dct = snl.as_dict()
                snl_dct.update(get_meta_from_structure(struct))
                snl_id = snl_meta["snl_id"]
                snl_dct["snl_id"] = snl_id
                logger.info(f"Created SNL object for {snl_id}.")

        if run:
            if task_doc["state"] == "successful":
                if docs and no_dupe_check:
                    target.collection.remove({"task_id": task_id})
                    logger.warning(
                        f"Removed previously parsed task {task_id}!")

                try:
                    target.insert_task(task_doc, use_gridfs=True)
                except DocumentTooLarge:
                    # Document exceeds MongoDB's 16 MB limit: drop known
                    # large output fields one at a time and retry the insert.
                    output = dotty(task_doc["calcs_reversed"][0]["output"])
                    pop_keys = [
                        "normalmode_eigenvecs",
                        "force_constants",
                        "outcar.onsite_density_matrices",
                    ]

                    for k in pop_keys:
                        if k not in output:
                            continue

                        logger.warning(f"{name} Remove {k} and retry ...")
                        output.pop(k)
                        try:
                            target.insert_task(task_doc, use_gridfs=True)
                            break
                        except DocumentTooLarge:
                            continue
                    else:
                        # All removable keys exhausted; skip this directory.
                        logger.warning(
                            f"{name} failed to reduce document size")
                        continue

                if target.collection.count(query):
                    # Insert confirmed; optionally store the SNL, then clean up.
                    if snl_dct:
                        result = snl_collection.insert_one(snl_dct)
                        logger.info(
                            f"SNL {result.inserted_id} inserted into {snl_collection.full_name}."
                        )

                    shutil.rmtree(vaspdir)
                    logger.info(
                        f"{name} Successfully parsed and removed {launcher}.")
                    count += 1
        else:
            count += 1

    return count
示例#30
0
    def run_task(self, fw_spec):
        """Legacy elastic analysis: fit a second-order elastic tensor from the
        deformation tasks in the spec and store the result in the
        ``elasticity`` collection (or ``elasticity.json``).
        """
        summary = {
            "analysis": {},
            "deformation_tasks": fw_spec["deformation_tasks"],
            "initial_structure": self['structure'].as_dict()
        }

        # Record the optimized structure when an optimization calc exists.
        opt_locs = [
            loc for loc in fw_spec['calc_locs'] if 'optimize' in loc['name']
        ]
        if opt_locs:
            opt_path = opt_locs[-1]['path']
            logger.info(
                "Parsing initial optimization directory: {}".format(opt_path))
            opt_doc = VaspDrone().assimilate(opt_path)
            opt_struct = Structure.from_dict(
                opt_doc["calcs_reversed"][0]["output"]["structure"])
            summary["optimized_structure"] = opt_struct.as_dict()

        # TODO: @montoyjh: does the below have anything to do with elastic tensor? If not, try
        # the more general fw_spec_field approach in the VaspToDb rather than hard-coding the
        # tags insertion here. -computron
        if fw_spec.get("tags", None):
            summary["tags"] = fw_spec["tags"]

        # Build the strain -> stress mapping used by the fitter.
        results = fw_spec["deformation_tasks"].values()
        defos = [r["deformation_matrix"] for r in results]
        stresses = [r["stress"] for r in results]
        strains = np.array([Strain(r["strain"]).voigt for r in results])
        stress_dict = {
            IndependentStrain(defo): Stress(stress)
            for defo, stress in zip(defos, stresses)
        }

        logger.info("Analyzing stress/strain data")
        # Determine if we have 6 unique deformations
        # TODO: @montoyjh: what if it's a cubic system? don't need 6. -computron
        if np.linalg.matrix_rank(strains) != 6:
            raise ValueError("Fewer than 6 unique deformations")

        # Perform Elastic tensor fitting and analysis
        fitted = ElasticTensor.from_stress_dict(stress_dict)
        summary["elastic_tensor"] = fitted.voigt.tolist()
        summary.update(fitted.property_dict)

        summary["state"] = "successful"

        # Save analysis results in json or db
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(summary, default=DATETIME_HANDLER))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(summary)
            logger.info("Elastic analysis complete.")
        return FWAction()
示例#31
0
    def run_task(self, fw_spec):
        """Fit an elastic tensor (or tensor expansion) from the deformation
        tasks in the spec and persist the analysis to the ``elasticity``
        collection, or to ``elasticity.json`` when no ``db_file`` is set.
        """
        ref_struct = self['structure']
        doc = {
            "analysis": {},
            "initial_structure": self['structure'].as_dict(),
        }

        # Use the relaxed structure and equilibrium stress from the most
        # recent optimization calculation, if one is present in calc_locs.
        opt_entries = [
            c for c in fw_spec.get('calc_locs', []) if 'optimiz' in c['name']
        ]
        eq_stress = None
        if opt_entries:
            path = opt_entries[-1]['path']
            logger.info(
                "Parsing initial optimization directory: {}".format(path))
            opt_doc = VaspDrone().assimilate(path)
            last_output = opt_doc["calcs_reversed"][0]["output"]
            opt_struct = Structure.from_dict(last_output["structure"])
            doc["optimized_structure"] = opt_struct.as_dict()
            ref_struct = opt_struct
            # -0.1 factor: kBar (VASP sign convention) -> GPa
            eq_stress = -0.1 * Stress(
                last_output["ionic_steps"][-1]["stress"])

        if self.get("fw_spec_field"):
            doc[self.get("fw_spec_field")] = fw_spec.get(
                self.get("fw_spec_field"))

        # Gather stresses/strains/deformations, symmetry-expanding each task.
        stresses = []
        strains = []
        deformations = []
        for entry in fw_spec["deformation_tasks"].values():
            stresses.append(Stress(entry["stress"]))
            strains.append(Strain(entry["strain"]))
            deformations.append(Deformation(entry["deformation_matrix"]))
            for op in entry.get("symmops", []):
                stresses.append(Stress(entry["stress"]).transform(op))
                strains.append(Strain(entry["strain"]).transform(op))
                deformations.append(
                    Deformation(entry["deformation_matrix"]).transform(op))

        stresses = [-0.1 * s for s in stresses]  # kBar -> GPa
        pk_stresses = [
            s.piola_kirchoff_2(f) for s, f in zip(stresses, deformations)
        ]

        doc['fitting_data'] = {
            'cauchy_stresses': stresses,
            'eq_stress': eq_stress,
            'strains': strains,
            'pk_stresses': pk_stresses,
            'deformations': deformations,
        }

        logger.info("Analyzing stress/strain data")
        # TODO: @montoyjh: what if it's a cubic system? don't need 6. -computron
        # TODO: Can add population method but want to think about how it should
        #           be done. -montoyjh
        order = self.get('order', 2)
        # Orders above 2 are only supported via finite differences.
        method = ('finite_difference' if order > 2
                  else self.get('fitting_method', 'finite_difference'))

        if method == 'finite_difference':
            fit = ElasticTensorExpansion.from_diff_fit(
                strains, pk_stresses, eq_stress=eq_stress, order=order)
            if order == 2:
                fit = ElasticTensor(fit[0])
        elif method == 'pseudoinverse':
            fit = ElasticTensor.from_pseudoinverse(strains, pk_stresses)
        elif method == 'independent':
            fit = ElasticTensor.from_independent_strains(
                strains, pk_stresses, eq_stress=eq_stress)
        else:
            raise ValueError("Unsupported method, method must be finite_difference, "
                             "pseudoinverse, or independent")

        ieee = fit.convert_to_ieee(ref_struct)
        doc["elastic_tensor"] = {
            "raw": fit.voigt,
            "ieee_format": ieee.voigt,
        }
        # Derive second-order properties (from the leading term for order > 2).
        props_tensor = ieee if order == 2 else ElasticTensor(ieee[0])
        doc["derived_properties"] = props_tensor.get_structure_property_dict(
            ref_struct)

        doc["formula_pretty"] = ref_struct.composition.reduced_formula
        doc["fitting_method"] = method
        doc["order"] = order

        doc = jsanitize(doc)

        # Save analysis results in json or db
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(doc, default=DATETIME_HANDLER))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(doc)
            logger.info("Elastic analysis complete.")

        return FWAction()
示例#32
0
    def run_task(self, fw_spec):
        """Parse a VASP calculation directory into a task document and insert
        it into the tasks collection, or dump it to ``task.json`` when no
        ``db_file`` is configured.

        Returns:
            FWAction: carries the task_id in stored_data, defuses child
            fireworks when the calculation is unsuccessful (per the
            ``defuse_unsuccessful`` option), and pushes any requested
            task-document fields into the downstream spec.

        Raises:
            RuntimeError: if the calc failed and ``defuse_unsuccessful`` is
                "fizzle" or an unknown value, or if ``task_fields_to_push``
                is not a dict.
        """
        # get the directory that contains the VASP dir to parse
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"], fw_spec["calc_locs"])["path"]

        # parse the VASP directory
        logger.info("PARSING DIRECTORY: {}".format(calc_dir))

        drone = VaspDrone(additional_fields=self.get("additional_fields"),
                          parse_dos=self.get("parse_dos", False),
                          bandstructure_mode=self.get("bandstructure_mode", False),
                          parse_chgcar=self.get("parse_chgcar", False),
                          parse_aeccar=self.get("parse_aeccar", False))

        # assimilate (i.e., parse)
        task_doc = drone.assimilate(calc_dir)

        # Check for additional keys to set based on the fw_spec
        if self.get("fw_spec_field"):
            task_doc.update(fw_spec[self.get("fw_spec_field")])

        # get the database connection
        db_file = env_chk(self.get('db_file'), fw_spec)

        # db insertion or taskdoc dump
        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
        else:
            mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
            # GridFS is needed whenever any large sub-document was parsed
            t_id = mmdb.insert_task(
                task_doc, use_gridfs=self.get("parse_dos", False)
                or bool(self.get("bandstructure_mode", False))
                or self.get("parse_chgcar", False)
                or self.get("parse_aeccar", False))
            logger.info("Finished parsing with task_id: {}".format(t_id))

        defuse_children = False
        if task_doc["state"] != "successful":
            defuse_unsuccessful = self.get("defuse_unsuccessful",
                                           DEFUSE_UNSUCCESSFUL)
            if defuse_unsuccessful is True:
                defuse_children = True
            elif defuse_unsuccessful is False:
                pass
            elif defuse_unsuccessful == "fizzle":
                raise RuntimeError(
                    "VaspToDb indicates that job is not successful "
                    "(perhaps your job did not converge within the "
                    "limit of electronic/ionic iterations)!")
            else:
                raise RuntimeError("Unknown option for defuse_unsuccessful: "
                                   "{}".format(defuse_unsuccessful))

        task_fields_to_push = self.get("task_fields_to_push", None)
        update_spec = {}
        if task_fields_to_push:
            if isinstance(task_fields_to_push, dict):
                for key, path_in_task_doc in task_fields_to_push.items():
                    if has(task_doc, path_in_task_doc):
                        update_spec[key] = get(task_doc, path_in_task_doc)
                    else:
                        # logger.warn is deprecated; use logger.warning
                        logger.warning(
                            "Could not find {} in task document. Unable to push to next firetask/firework"
                            .format(path_in_task_doc))
            else:
                # BUGFIX: literal braces must be doubled inside str.format;
                # the previous "{key: path}" was parsed as a replacement
                # field and raised KeyError('key') instead of this error.
                raise RuntimeError(
                    "Inappropriate type {} for task_fields_to_push. It must be a "
                    "dictionary of format: {{key: path}} where key refers to a field "
                    "in the spec and path is a full mongo-style path to a "
                    "field in the task document".format(type(task_fields_to_push)))

        return FWAction(stored_data={"task_id": task_doc.get("task_id", None)},
                        defuse_children=defuse_children, update_spec=update_spec)