Example No. 1
 def test_get_strain_state_dict(self):
     strain_inds = [(0,), (1,), (2,), (1, 3), (1, 2, 3)]
     vecs = {}
     strain_states = []
     for strain_ind in strain_inds:
         ss = np.zeros(6)
         np.put(ss, strain_ind, 1)
         strain_states.append(tuple(ss))
         vec = np.zeros((4, 6))
         rand_values = np.random.uniform(0.1, 1, 4)
         for i in strain_ind:
             vec[:, i] = rand_values
         vecs[strain_ind] = vec
     all_strains = [Strain.from_voigt(v).zeroed() for vec in vecs.values()
                    for v in vec]
     random.shuffle(all_strains)
     all_stresses = [Stress.from_voigt(np.random.random(6)).zeroed()
                     for s in all_strains]
     strain_dict = {k.tostring(): v for k, v in zip(all_strains, all_stresses)}
     ss_dict = get_strain_state_dict(all_strains, all_stresses, add_eq=False)
     # Check length of ss_dict
     self.assertEqual(len(strain_inds), len(ss_dict))
     # Check sets of strain states are correct
     self.assertEqual(set(strain_states), set(ss_dict.keys()))
     for strain_state, data in ss_dict.items():
         # Check correspondence of strains/stresses
         for strain, stress in zip(data["strains"], data["stresses"]):
             self.assertArrayAlmostEqual(Stress.from_voigt(stress), 
                                         strain_dict[Strain.from_voigt(strain).tostring()])
Example No. 2
class StressTest(PymatgenTest):
    def setUp(self):
        self.rand_stress = Stress(np.random.randn(3, 3))
        self.symm_stress = Stress([[0.51, 2.29, 2.42],
                                   [2.29, 5.14, 5.07],
                                   [2.42, 5.07, 5.33]])
        self.non_symm = Stress([[0.1, 0.2, 0.3],
                                [0.4, 0.5, 0.6],
                                [0.2, 0.5, 0.5]])

    def test_properties(self):
        # mean_stress
        self.assertEqual(self.rand_stress.mean_stress,
                         1. / 3. * (self.rand_stress[0, 0] +
                                    self.rand_stress[1, 1] +
                                    self.rand_stress[2, 2]))
        self.assertAlmostEqual(self.symm_stress.mean_stress, 3.66)
        # deviator_stress
        self.assertArrayAlmostEqual(self.symm_stress.deviator_stress,
                                    Stress([[-3.15, 2.29, 2.42],
                                            [2.29, 1.48, 5.07],
                                            [2.42, 5.07, 1.67]]))
        self.assertArrayAlmostEqual(self.non_symm.deviator_stress,
                                    [[-0.2666666667, 0.2, 0.3],
                                     [0.4, 0.133333333, 0.6],
                                     [0.2, 0.5, 0.133333333]])
        # deviator_principal_invariants
        self.assertArrayAlmostEqual(self.symm_stress.dev_principal_invariants,
                                    [0, 44.2563, 111.953628])
        # von_mises
        self.assertAlmostEqual(self.symm_stress.von_mises,
                               11.52253878275)
        # piola_kirchoff 1, 2
        f = Deformation.from_index_amount((0, 1), 0.03)
        self.assertArrayAlmostEqual(self.symm_stress.piola_kirchoff_1(f),
                                    [[0.4413, 2.29, 2.42],
                                     [2.1358, 5.14, 5.07],
                                     [2.2679, 5.07, 5.33]])
        self.assertArrayAlmostEqual(self.symm_stress.piola_kirchoff_2(f),
                                    [[0.377226, 2.1358, 2.2679],
                                     [2.1358, 5.14, 5.07],
                                     [2.2679, 5.07, 5.33]])
        # voigt
        self.assertArrayEqual(self.symm_stress.voigt,
                              [0.51, 5.14, 5.33, 5.07, 2.42, 2.29])
        with warnings.catch_warnings(record=True) as w:
            self.non_symm.voigt
            self.assertEqual(len(w), 1)
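
The properties exercised above follow directly from their definitions: the mean stress is one third of the trace, the deviator subtracts the mean stress times the identity, and the von Mises stress is sqrt(3*J2) of the deviator. A minimal NumPy sketch, independent of pymatgen, that should reproduce the tested values for symm_stress:

import numpy as np

symm = np.array([[0.51, 2.29, 2.42],
                 [2.29, 5.14, 5.07],
                 [2.42, 5.07, 5.33]])
mean_stress = np.trace(symm) / 3                   # ~3.66
deviator = symm - mean_stress * np.eye(3)          # diagonal ~(-3.15, 1.48, 1.67)
von_mises = np.sqrt(1.5 * np.sum(deviator ** 2))   # ~11.5225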
Example No. 3
 def setUp(self):
     self.rand_stress = Stress(np.random.randn(3, 3))
     self.symm_stress = Stress([[0.51, 2.29, 2.42],
                                [2.29, 5.14, 5.07],
                                [2.42, 5.07, 5.33]])
     self.non_symm = Stress([[0.1, 0.2, 0.3],
                             [0.4, 0.5, 0.6],
                             [0.2, 0.5, 0.5]])
Example No. 4
import pymatgen as mg
from pymatgen.analysis.elasticity.strain import DeformedStructureSet
import os
from shutil import copyfile
from pymatgen.io.vasp.outputs import Vasprun
from pymatgen.analysis.elasticity.stress import Stress
from pymatgen.analysis.elasticity.elastic import ElasticTensor

# Relaxed (equilibrium) structure used to generate the deformations
structure = mg.Structure.from_file("POSCAR")
def_set = DeformedStructureSet(structure)
strains = def_set.as_strain_dict()

# Directories poscar1 ... poscar24 hold the deformed-structure calculations
calculations = []
for x in range(1, 25):
    calculations.append('poscar%s' % x)

# Match each deformed calculation to its strain via the lattice and
# collect the stress from the final ionic step of its vasprun.xml
match_dict = {}
for calc in calculations:
    struct = mg.Structure.from_file(calc + "/POSCAR")
    vrun = Vasprun(calc + '/vasprun.xml', parse_dos=False, parse_eigen=False)
    stress = Stress(vrun.ionic_steps[-1]['stress'])
    for strain in strains:
        if strains[strain].lattice == struct.lattice:
            match_dict[strain] = stress

# Fit the elastic tensor from the strain -> stress mapping and write
# the Voigt matrix and the Voigt bulk modulus to a text file
elastics = ElasticTensor.from_stress_dict(match_dict)
with open("elasts.txt", 'w') as f:
    f.write(str(elastics.voigt) + '\n')
    f.write(str(elastics.k_voigt) + '\n')
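
The script above assumes directories poscar1 through poscar24 already contain completed VASP runs for each deformation. A hypothetical companion snippet (the directory naming simply mirrors the parsing loop above; it is an illustration, not part of the original) that would write those inputs from the same DeformedStructureSet:

import os
import pymatgen as mg
from pymatgen.analysis.elasticity.strain import DeformedStructureSet

structure = mg.Structure.from_file("POSCAR")
def_set = DeformedStructureSet(structure)

# Write each deformed structure to poscar1/POSCAR, poscar2/POSCAR, ...
for i, deformed in enumerate(def_set, start=1):
    dirname = 'poscar%s' % i
    os.makedirs(dirname, exist_ok=True)
    deformed.to(fmt="poscar", filename=os.path.join(dirname, "POSCAR"))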
Example No. 5
def get_elastic_analysis(opt_task, defo_tasks):
    """
    Performs the analysis of an optimization task and its deformation
    tasks necessary for an elastic analysis.

    Args:
        opt_task: task doc corresponding to the optimization
        defo_tasks: task docs corresponding to the deformations

    Returns:
        elastic document with fitted elastic tensor and analysis
    """
    elastic_doc = {"warnings": []}
    opt_struct = Structure.from_dict(opt_task['output']['structure'])
    d_structs = [
        Structure.from_dict(d['output']['structure']) for d in defo_tasks
    ]
    defos = [
        calculate_deformation(opt_struct, def_structure)
        for def_structure in d_structs
    ]

    # Warning if deformation is not equivalent to stored deformation
    stored_defos = [d['transmuter']['transformation_params'][0]\
                     ['deformation'] for d in defo_tasks]
    if not np.allclose(defos, stored_defos, atol=1e-5):
        wmsg = "Inequivalent stored and calc. deformations."
        logger.warning(wmsg)
        elastic_doc["warnings"].append(wmsg)

    # Collect all fitting data and task ids
    defos = [Deformation(d) for d in defos]
    strains = [d.green_lagrange_strain for d in defos]
    vasp_stresses = [d['output']['stress'] for d in defo_tasks]
    cauchy_stresses = [-0.1 * Stress(s) for s in vasp_stresses]
    pk_stresses = [
        Stress(s.piola_kirchoff_2(d)) for s, d in zip(cauchy_stresses, defos)
    ]
    defo_task_ids = [d['task_id'] for d in defo_tasks]

    # Determine whether the data are sufficient to fit the tensor.
    # If the raw data are insufficient but can be symmetrically expanded
    # into a sufficient set, use the expanded set with the appropriate
    # symmetry transformations; fstrains/fstresses below are the
    # "fitting" strains and stresses.
    vstrains = [s.voigt for s in strains]
    if np.linalg.matrix_rank(vstrains) < 6:
        symmops = SpacegroupAnalyzer(opt_struct).get_symmetry_operations()
        fstrains = [[s.transform(symmop) for symmop in symmops]
                    for s in strains]
        fstrains = list(chain.from_iterable(fstrains))
        vfstrains = [s.voigt for s in fstrains]
        if not np.linalg.matrix_rank(vfstrains) == 6:
            logger.warning("Insufficient data to form SOEC")
            elastic_doc['warnings'].append("insufficient strains")
            return None
        else:
            fstresses = [[s.transform(symmop) for symmop in symmops]
                         for s in pk_stresses]
            fstresses = list(chain.from_iterable(fstresses))
    else:
        fstrains = strains
        fstresses = pk_stresses

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        if len(cauchy_stresses) == 24:
            elastic_doc['legacy_fit'] = legacy_fit(strains, cauchy_stresses)
        et_raw = ElasticTensor.from_pseudoinverse(fstrains, fstresses)
        et = et_raw.voigt_symmetrized.convert_to_ieee(opt_struct)
        defo_tasks = sorted(defo_tasks, key=lambda x: x['completed_at'])
        vasp_input = opt_task['input']
        vasp_input.pop('structure')

        elastic_doc.update({
            "deformation_task_ids": defo_task_ids,
            "optimization_task_id": opt_task['task_id'],
            "pk_stresses": pk_stresses,
            "cauchy_stresses": cauchy_stresses,
            "strains": strains,
            "deformations": defos,
            "elastic_tensor": et.voigt,
            "elastic_tensor_raw": et_raw.voigt,
            "optimized_structure": opt_struct,
            "completed_at": defo_tasks[-1]['completed_at'],
            "optimization_input": vasp_input
        })

    # Process input
    elastic_doc['warnings'] = get_warnings(et, opt_struct) or None
    #TODO: process MPWorks metadata?
    #TODO: higher order
    #TODO: add some of the relevant DFT params, kpoints
    elastic_doc['state'] = "filter_failed" if elastic_doc['warnings']\
        else "successful"
    return elastic_doc
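
The -0.1 factor above converts VASP's stresses from kBar to GPa and flips the sign convention, giving Cauchy stresses that are then mapped to second Piola-Kirchhoff stresses paired with Green-Lagrange strains for fitting. A condensed sketch of that conversion chain, with illustrative values:

import numpy as np
from pymatgen.analysis.elasticity.strain import Deformation
from pymatgen.analysis.elasticity.stress import Stress

defo = Deformation(np.eye(3) + np.diag([0.01, 0.0, 0.0]))   # 1% stretch along x
vasp_stress = [[-5.0, 0.0, 0.0],
               [0.0, -1.0, 0.0],
               [0.0, 0.0, -1.0]]                             # kBar, VASP sign

cauchy = -0.1 * Stress(vasp_stress)          # GPa, continuum sign convention
pk2 = cauchy.piola_kirchoff_2(defo)          # stress measure used in the fit
strain = defo.green_lagrange_strain          # paired fitting strain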
Example No. 6
    def run_task(self, fw_spec):
        ref_struct = self['structure']
        d = {
            "analysis": {},
            "initial_structure": self['structure'].as_dict()
        }

        # Get optimized structure
        calc_locs_opt = [cl for cl in fw_spec.get('calc_locs', []) if 'optimiz' in cl['name']]
        if calc_locs_opt:
            optimize_loc = calc_locs_opt[-1]['path']
            logger.info("Parsing initial optimization directory: {}".format(optimize_loc))
            drone = VaspDrone()
            optimize_doc = drone.assimilate(optimize_loc)
            opt_struct = Structure.from_dict(optimize_doc["calcs_reversed"][0]["output"]["structure"])
            d.update({"optimized_structure": opt_struct.as_dict()})
            ref_struct = opt_struct
            eq_stress = -0.1*Stress(optimize_doc["calcs_reversed"][0]["output"]["ionic_steps"][-1]["stress"])
        else:
            eq_stress = None

        if self.get("fw_spec_field"):
            d.update({self.get("fw_spec_field"): fw_spec.get(self.get("fw_spec_field"))})

        # Get the stresses, strains, deformations from deformation tasks
        defo_dicts = fw_spec["deformation_tasks"].values()
        stresses, strains, deformations = [], [], []
        for defo_dict in defo_dicts:
            stresses.append(Stress(defo_dict["stress"]))
            strains.append(Strain(defo_dict["strain"]))
            deformations.append(Deformation(defo_dict["deformation_matrix"]))
            # Add derived stresses and strains if symmops is present
            for symmop in defo_dict.get("symmops", []):
                stresses.append(Stress(defo_dict["stress"]).transform(symmop))
                strains.append(Strain(defo_dict["strain"]).transform(symmop))
                deformations.append(Deformation(defo_dict["deformation_matrix"]).transform(symmop))

        stresses = [-0.1*s for s in stresses]
        pk_stresses = [stress.piola_kirchoff_2(deformation)
                       for stress, deformation in zip(stresses, deformations)]

        d['fitting_data'] = {'cauchy_stresses': stresses,
                             'eq_stress': eq_stress,
                             'strains': strains,
                             'pk_stresses': pk_stresses,
                             'deformations': deformations
                             }

        logger.info("Analyzing stress/strain data")
        # TODO: @montoyjh: what if it's a cubic system? don't need 6. -computron
        # TODO: Can add population method but want to think about how it should
        #           be done. -montoyjh
        order = self.get('order', 2)
        if order > 2:
            method = 'finite_difference'
        else:
            method = self.get('fitting_method', 'finite_difference')

        if method == 'finite_difference':
            result = ElasticTensorExpansion.from_diff_fit(
                    strains, pk_stresses, eq_stress=eq_stress, order=order)
            if order == 2:
                result = ElasticTensor(result[0])
        elif method == 'pseudoinverse':
            result = ElasticTensor.from_pseudoinverse(strains, pk_stresses)
        elif method == 'independent':
            result = ElasticTensor.from_independent_strains(strains, pk_stresses, eq_stress=eq_stress)
        else:
            raise ValueError("Unsupported method, method must be finite_difference, "
                             "pseudoinverse, or independent")

        ieee = result.convert_to_ieee(ref_struct)
        d.update({
            "elastic_tensor": {
                "raw": result.voigt,
                "ieee_format": ieee.voigt
            }
        })
        if order == 2:
            d.update({"derived_properties": ieee.get_structure_property_dict(ref_struct)})
        else:
            soec = ElasticTensor(ieee[0])
            d.update({"derived_properties": soec.get_structure_property_dict(ref_struct)})

        d["formula_pretty"] = ref_struct.composition.reduced_formula
        d["fitting_method"] = method
        d["order"] = order

        d = jsanitize(d)

        # Save analysis results in json or db
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(d)
            logger.info("Elastic analysis complete.")
        
        return FWAction()
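
When a deformation task carries symmops, the loop above enlarges the fitting set by applying each symmetry operation to the stored stress, strain, and deformation via .transform(symmop). A small sketch of what one such transformation does to a strain (the rotation chosen here is only an illustration):

import numpy as np
from pymatgen.core.operations import SymmOp
from pymatgen.analysis.elasticity.strain import Strain

# 90 degree rotation about z as an example symmetry operation
symmop = SymmOp.from_rotation_and_translation(
    [[0, -1, 0], [1, 0, 0], [0, 0, 1]], [0, 0, 0])

strain = Strain(np.diag([0.01, 0.0, 0.0]))   # uniaxial strain along x
transformed = strain.transform(symmop)       # rotates into uniaxial strain along y
assert np.allclose(transformed, np.diag([0.0, 0.01, 0.0]))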
Example No. 7
def process_elastic_calcs(opt_doc, defo_docs, add_derived=True, tol=0.002):
    """
    Generates the list of calcs from deformation docs, along with
    'derived stresses', i.e. stresses obtained by applying symmetry
    operations to existing calcs whose transformed strains yield an
    independent strain not in the input list.

    Args:
        opt_doc (dict): document for the optimization task
        defo_docs ([dict]): list of documents for deformation tasks
        add_derived (bool): flag for whether or not to add derived
            stress-strain pairs based on symmetry
        tol (float): tolerance for assigning equivalent stresses/strains

    Returns ([dict], [dict]):
        Two lists of summary documents, one for the explicit
        calculations and one for the symmetry-derived ones
    """
    structure = Structure.from_dict(opt_doc['output']['structure'])
    input_structure = Structure.from_dict(opt_doc['input']['structure'])

    # Process explicit calcs, store in dict keyed by strain
    explicit_calcs = TensorMapping()
    for doc in defo_docs:
        calc = {
            "type": "explicit",
            "input": doc["input"],
            "output": doc["output"],
            "task_id": doc["task_id"],
            "completed_at": doc["completed_at"]
        }
        deformed_structure = Structure.from_dict(doc['output']['structure'])
        defo = Deformation(calculate_deformation(structure,
                                                 deformed_structure))
        # Warning if deformation is not equivalent to stored deformation
        stored_defo = doc['transmuter']['transformation_params'][0]\
            ['deformation']
        if not np.allclose(defo, stored_defo, atol=1e-5):
            wmsg = "Inequivalent stored and calc. deformations."
            logger.debug(wmsg)
            calc["warnings"] = wmsg
        cauchy_stress = -0.1 * Stress(doc['output']['stress'])
        pk_stress = cauchy_stress.piola_kirchoff_2(defo)
        strain = defo.green_lagrange_strain
        calc.update({
            "deformation": defo,
            "cauchy_stress": cauchy_stress,
            "strain": strain,
            "pk_stress": pk_stress
        })
        if strain in explicit_calcs:
            existing_value = explicit_calcs[strain]
            if doc['completed_at'] > existing_value['completed_at']:
                explicit_calcs[strain] = calc
        else:
            explicit_calcs[strain] = calc

    if not add_derived:
        return explicit_calcs.values(), None

    # Determine all of the implicit calculations to include
    sga = SpacegroupAnalyzer(structure, symprec=0.1)
    symmops = sga.get_symmetry_operations(cartesian=True)
    derived_calcs_by_strain = TensorMapping(tol=0.002)
    for strain, calc in explicit_calcs.items():
        # Generate all transformed strains
        task_id = calc['task_id']
        tstrains = [(symmop, strain.transform(symmop)) for symmop in symmops]
        # Filter strains by those which are independent and new
        # For second order
        if len(explicit_calcs) < 30:
            tstrains = [(symmop, tstrain) for symmop, tstrain in tstrains
                        if tstrain.get_deformation_matrix().is_independent(tol)
                        and not tstrain in explicit_calcs]
        # For third order
        else:
            strain_states = get_default_strain_states(3)
            # Default stencil in atomate, this maybe shouldn't be hard-coded
            stencil = np.linspace(-0.075, 0.075, 7)
            valid_strains = [
                Strain.from_voigt(s * np.array(strain_state))
                for s, strain_state in product(stencil, strain_states)
            ]
            valid_strains = [v for v in valid_strains if not np.allclose(v, 0)]
            valid_strains = TensorMapping(valid_strains,
                                          [True] * len(valid_strains))
            tstrains = [
                (symmop, tstrain) for symmop, tstrain in tstrains
                if tstrain in valid_strains and not tstrain in explicit_calcs
            ]
        # Add surviving tensors to derived_strains dict
        for symmop, tstrain in tstrains:
            # curr_set = derived_calcs_by_strain[tstrain]
            if tstrain in derived_calcs_by_strain:
                curr_set = derived_calcs_by_strain[tstrain]
                curr_task_ids = [c[1] for c in curr_set]
                if task_id not in curr_task_ids:
                    curr_set.append((symmop, calc['task_id']))
            else:
                derived_calcs_by_strain[tstrain] = [(symmop, calc['task_id'])]

    # Process derived calcs
    explicit_calcs_by_id = {d['task_id']: d for d in explicit_calcs.values()}
    derived_calcs = []
    for strain, calc_set in derived_calcs_by_strain.items():
        symmops, task_ids = zip(*calc_set)
        task_strains = [
            Strain(explicit_calcs_by_id[task_id]['strain'])
            for task_id in task_ids
        ]
        task_stresses = [
            explicit_calcs_by_id[task_id]['cauchy_stress']
            for task_id in task_ids
        ]
        derived_strains = [
            tstrain.transform(symmop)
            for tstrain, symmop in zip(task_strains, symmops)
        ]
        for derived_strain in derived_strains:
            if not np.allclose(derived_strain, strain, atol=2e-3):
                logger.info("Issue with derived strains")
                raise ValueError("Issue with derived strains")
        derived_stresses = [
            tstress.transform(sop)
            for sop, tstress in zip(symmops, task_stresses)
        ]
        input_docs = [{
            "task_id": task_id,
            "strain": task_strain,
            "cauchy_stress": task_stress,
            "symmop": symmop
        } for task_id, task_strain, task_stress, symmop in zip(
            task_ids, task_strains, task_stresses, symmops)]
        calc = {
            "strain": strain,
            "cauchy_stress": Stress(np.average(derived_stresses, axis=0)),
            "deformation": strain.get_deformation_matrix(),
            "input_tasks": input_docs,
            "type": "derived"
        }
        calc['pk_stress'] = calc['cauchy_stress'].piola_kirchoff_2(
            calc['deformation'])
        derived_calcs.append(calc)

    return list(explicit_calcs.values()), derived_calcs
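
explicit_calcs above is keyed by Strain tensors through TensorMapping, which matches keys by numerical closeness (within tol) rather than exact equality, so slightly noisy strains still resolve to the same entry. A minimal sketch of that behavior, assuming TensorMapping is importable from pymatgen.core.tensors as in recent pymatgen versions:

import numpy as np
from pymatgen.core.tensors import TensorMapping
from pymatgen.analysis.elasticity.strain import Strain

mapping = TensorMapping(tol=1e-5)
key = Strain(np.diag([0.01, 0.0, 0.0]))
mapping[key] = {"type": "explicit"}

# A key that differs only by noise below tol resolves to the same entry
noisy = Strain(np.diag([0.01 + 1e-7, 0.0, 0.0]))
assert noisy in mapping
assert mapping[noisy]["type"] == "explicit"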
Example No. 8
def get_elastic_analysis(opt_task, defo_tasks):
    """
    Performs the analysis of an optimization task and its deformation
    tasks necessary for an elastic analysis.

    Args:
        opt_task: task doc corresponding to the optimization
        defo_tasks: task docs corresponding to the deformations

    Returns:
        elastic document with fitted elastic tensor and analysis
    """
    elastic_doc = {"warnings": []}
    opt_struct = Structure.from_dict(opt_task['output']['structure'])
    input_struct = Structure.from_dict(opt_task['input']['structure'])
    # For now, discern order (i.e. TOEC) using parameters from optimization
    # TODO: figure this out more intelligently
    diff = get(opt_task, "input.incar.EDIFFG", 0)
    order = 3 if np.isclose(diff, -0.001) else 2
    explicit, derived = process_elastic_calcs(opt_task, defo_tasks)
    all_calcs = explicit + derived
    stresses = [c.get("cauchy_stress") for c in all_calcs]
    pk_stresses = [c.get("pk_stress") for c in all_calcs]
    strains = [c.get("strain") for c in all_calcs]
    elastic_doc['calculations'] = all_calcs
    vstrains = [s.zeroed(0.002).voigt for s in strains]
    if np.linalg.matrix_rank(vstrains) == 6:
        if order == 2:
            et_fit = legacy_fit(strains, stresses)
        elif order == 3:
            # Test for TOEC
            if len(strains) < 70:
                logger.info("insufficient valid strains for {} TOEC".format(
                    opt_task['formula_pretty']))
                return None
            eq_stress = -0.1 * Stress(opt_task['output']['stress'])
            # strains = [s.zeroed(0.0001) for s in strains]
            # et_expansion = pdb_function(ElasticTensorExpansion.from_diff_fit,
            #     strains, pk_stresses, eq_stress=eq_stress, tol=1e-5)
            et_exp_raw = ElasticTensorExpansion.from_diff_fit(
                strains, pk_stresses, eq_stress=eq_stress, tol=1e-6)
            et_exp = et_exp_raw.voigt_symmetrized.convert_to_ieee(opt_struct)
            et_exp = et_exp.round(1)
            et_fit = ElasticTensor(et_exp[0])
            # Update elastic doc with TOEC stuff
            tec = et_exp.thermal_expansion_coeff(opt_struct, 300)
            elastic_doc.update({
                "elastic_tensor_expansion": elastic_sanitize(et_exp),
                "elastic_tensor_expansion_original": elastic_sanitize(et_exp_raw),
                "thermal_expansion_tensor": tec,
                "average_linear_thermal_expansion": np.trace(tec) / 3
            })
        et = et_fit.voigt_symmetrized.convert_to_ieee(opt_struct)
        vasp_input = opt_task['input']
        if 'structure' in vasp_input:
            vasp_input.pop('structure')
        completed_at = max([d['completed_at'] for d in defo_tasks])
        elastic_doc.update({
            "optimization_task_id": opt_task['task_id'],
            "optimization_dir_name": opt_task['dir_name'],
            "cauchy_stresses": stresses,
            "strains": strains,
            "elastic_tensor": elastic_sanitize(et.zeroed(0.01).round(0)),
            # Convert compliance to 10^-12 Pa^-1 (1/TPa)
            "compliance_tensor": elastic_sanitize(et.compliance_tensor * 1000),
            "elastic_tensor_original": elastic_sanitize(et_fit),
            "optimized_structure": opt_struct,
            "spacegroup": input_struct.get_space_group_info()[0],
            "input_structure": input_struct,
            "completed_at": completed_at,
            "optimization_input": vasp_input,
            "order": order,
            "pretty_formula": opt_struct.composition.reduced_formula
        })
        # Add magnetic type
        mag = CollinearMagneticStructureAnalyzer(opt_struct).ordering.value
        elastic_doc['magnetic_type'] = mag_types[mag]
        try:
            prop_dict = et.get_structure_property_dict(opt_struct)
            prop_dict.pop('structure')
        except ValueError:
            logger.debug("Negative K or G found, structure property "
                         "dict not computed")
            prop_dict = et.property_dict
        for k, v in prop_dict.items():
            if k in ['homogeneous_poisson', 'universal_anisotropy']:
                prop_dict[k] = np.round(v, 2)
            else:
                prop_dict[k] = np.round(v, 0)
        elastic_doc.update(prop_dict)
        # Update with state and warnings
        state, warnings = get_state_and_warnings(elastic_doc)
        elastic_doc.update({"state": state, "warnings": warnings})
        # TODO: add kpoints params?
        return elastic_doc
    else:
        logger.info("insufficient valid strains for {}".format(
            opt_task['formula_pretty']))
        return None
Example No. 9
def get_strain_state_dict(strains,
                          stresses,
                          eq_stress=None,
                          tol=1e-10,
                          add_eq=True,
                          sort=True):
    """
    Creates a dictionary of voigt-notation stress-strain sets
    keyed by "strain state", i.e. a tuple of the nonzero entries
    expressed as ratios to the lowest nonzero value,
    e.g. [0, 0.1, 0, 0.2, 0, 0] -> (0, 1, 0, 2, 0, 0).
    This allows strains to be collected into stencils so that
    parameterized finite-difference derivatives can be evaluated.

    Args:
        strains (Nx3x3 array-like): strain matrices
        stresses (Nx3x3 array-like): stress matrices
        eq_stress (3x3 array-like): equilibrium (zero-strain) stress
        tol (float): tolerance below which strain/stress entries are
            zeroed when identifying strain states
        add_eq (bool): flag for whether to add the equilibrium
            stress-strain pair to each strain state's data set
        sort (bool): flag for whether to sort strain states

    Returns:
        OrderedDict with strain-state keys and dictionaries
        of stress-strain data corresponding to each strain state
    """
    # Recast stress/strains
    vstrains = np.array([Strain(s).zeroed(tol).voigt for s in strains])
    vstresses = np.array([Stress(s).zeroed(tol).voigt for s in stresses])
    # Collect independent strain states:
    independent = set(
        [tuple(np.nonzero(vstrain)[0].tolist()) for vstrain in vstrains])
    strain_state_dict = OrderedDict()
    if add_eq:
        if eq_stress is not None:
            veq_stress = Stress(eq_stress).voigt
        else:
            veq_stress = find_eq_stress(strains, stresses).voigt

    for n, ind in enumerate(independent):
        # match strains with templates
        template = np.zeros(6, dtype=bool)
        np.put(template, ind, True)
        template = np.tile(template, [vstresses.shape[0], 1])
        mode = (template == (np.abs(vstrains) > 1e-10)).all(axis=1)
        mstresses = vstresses[mode]
        mstrains = vstrains[mode]
        # Get "strain state", i.e. ratio of each value to minimum strain
        min_nonzero_ind = np.argmin(np.abs(np.take(mstrains[-1], ind)))
        min_nonzero_val = np.take(mstrains[-1], ind)[min_nonzero_ind]
        strain_state = mstrains[-1] / min_nonzero_val
        strain_state = tuple(strain_state)

        if add_eq:
            # add zero strain state
            mstrains = np.vstack([mstrains, np.zeros(6)])
            mstresses = np.vstack([mstresses, veq_stress])
        # sort strains/stresses by strain values
        if sort:
            mstresses = mstresses[mstrains[:, ind[0]].argsort()]
            mstrains = mstrains[mstrains[:, ind[0]].argsort()]
        strain_state_dict[strain_state] = {
            "strains": mstrains,
            "stresses": mstresses
        }
    return strain_state_dict
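
The strain-state key described in the docstring is just the Voigt strain divided by its smallest-magnitude nonzero entry. A standalone NumPy illustration of the keying computed above:

import numpy as np

vstrain = np.array([0.0, 0.01, 0.0, 0.02, 0.0, 0.0])    # Voigt strain
ind = np.nonzero(vstrain)[0]                            # nonzero indices -> [1, 3]
min_nonzero = vstrain[ind][np.argmin(np.abs(vstrain[ind]))]
strain_state = tuple(vstrain / min_nonzero)             # (0, 1, 0, 2, 0, 0)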
Example No. 10
    def run_task(self, fw_spec):

        # Get optimized structure
        # TODO: will this find the correct path if the workflow is rerun from the start?
        optimize_loc = fw_spec["calc_locs"][0]["path"]
        logger.info(
            "PARSING INITIAL OPTIMIZATION DIRECTORY: {}".format(optimize_loc))
        drone = VaspDrone()
        optimize_doc = drone.assimilate(optimize_loc)
        opt_struct = Structure.from_dict(
            optimize_doc["calcs_reversed"][0]["output"]["structure"])

        d = {
            "analysis": {},
            "deformation_tasks": fw_spec["deformation_tasks"],
            "initial_structure": self['structure'].as_dict(),
            "optimized_structure": opt_struct.as_dict()
        }
        if fw_spec.get("tags", None):
            d["tags"] = fw_spec["tags"]
        dtypes = fw_spec["deformation_tasks"].keys()
        defos = [
            fw_spec["deformation_tasks"][dtype]["deformation_matrix"]
            for dtype in dtypes
        ]
        stresses = [
            fw_spec["deformation_tasks"][dtype]["stress"] for dtype in dtypes
        ]
        stress_dict = {
            IndependentStrain(defo): Stress(stress)
            for defo, stress in zip(defos, stresses)
        }

        logger.info("ANALYZING STRESS/STRAIN DATA")
        # DETERMINE IF WE HAVE 6 "UNIQUE" deformations
        if len(set([de[:3] for de in dtypes])) == 6:
            # Perform Elastic tensor fitting and analysis
            result = ElasticTensor.from_stress_dict(stress_dict)
            d["elastic_tensor"] = result.voigt.tolist()
            kg_average = result.kg_average
            d.update({
                "K_Voigt": kg_average[0],
                "G_Voigt": kg_average[1],
                "K_Reuss": kg_average[2],
                "G_Reuss": kg_average[3],
                "K_Voigt_Reuss_Hill": kg_average[4],
                "G_Voigt_Reuss_Hill": kg_average[5]
            })
            d["universal_anisotropy"] = result.universal_anisotropy
            d["homogeneous_poisson"] = result.homogeneous_poisson

        else:
            raise ValueError("Fewer than 6 unique deformations")

        d["state"] = "successful"

        # Save analysis results in json or db
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            db = MMVaspDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(d)
            logger.info("ELASTIC ANALYSIS COMPLETE")
        return FWAction()
Example No. 11
    def run_task(self, fw_spec):
        db_dir = os.environ['DB_LOC']
        db_path = os.path.join(db_dir, 'tasks_db.json')
        i = fw_spec['original_task_id']

        with open(db_path) as f:
            db_creds = json.load(f)
        connection = MongoClient(db_creds['host'], db_creds['port'])
        tdb = connection[db_creds['database']]
        tdb.authenticate(db_creds['admin_user'], db_creds['admin_password'])
        tasks = tdb[db_creds['collection']]
        elasticity = tdb['elasticity']
        ndocs = tasks.find({
            "original_task_id": i,
            "state": "successful"
        }).count()
        existing_doc = elasticity.find_one({"relaxation_task_id": i})
        if existing_doc:
            print("Updating: " + i)
        else:
            print("New material: " + i)
        d = {"analysis": {}, "error": [], "warning": []}
        d["ndocs"] = ndocs
        o = tasks.find_one({"task_id": i}, {
            "pretty_formula": 1,
            "spacegroup": 1,
            "snl": 1,
            "snl_final": 1,
            "run_tags": 1
        })
        if not o:
            raise ValueError("Cannot find original task id")
        # Get stress from deformed structure
        d["deformation_tasks"] = {}
        ss_dict = {}
        for k in tasks.find({"original_task_id": i}, {
                "deformation_matrix": 1,
                "calculations.output": 1,
                "state": 1,
                "task_id": 1
        }):
            defo = k['deformation_matrix']
            d_ind = np.nonzero(defo - np.eye(3))
            delta = Decimal((defo - np.eye(3))[d_ind][0])
            # Normal deformation
            if d_ind[0] == d_ind[1]:
                dtype = "_".join(
                    ["d", str(d_ind[0][0]), "{:.0e}".format(delta)])
            # Shear deformation
            else:
                dtype = "_".join(
                    ["s",
                     str(d_ind[0] + d_ind[1]), "{:.0e}".format(delta)])
            sm = IndependentStrain(defo)
            if dtype in d["deformation_tasks"].keys():
                print("old_task: {}".format(
                    d["deformation_tasks"][dtype]["task_id"]))
                print("new_task: {}".format(k["task_id"]))
                raise ValueError("Duplicate deformation task in database.")
            d["deformation_tasks"][dtype] = {
                "state": k["state"],
                "deformation_matrix": defo,
                "strain": sm.tolist(),
                "task_id": k["task_id"]
            }
            if k["state"] == "successful":
                st = Stress(k["calculations"][-1]["output"] \
                            ["ionic_steps"][-1]["stress"])
                ss_dict[sm] = st
        d["snl"] = o["snl"]
        if "run_tags" in o.keys():
            d["run_tags"] = o["run_tags"]
            for tag in o["run_tags"]:
                if isinstance(tag, dict):
                    if "input_id" in tag.keys():
                        d["input_mp_id"] = tag["input_id"]
        d["snl_final"] = o["snl_final"]
        d["pretty_formula"] = o["pretty_formula"]

        # Old input mp-id style
        if o["snl"]["about"].get("_mp_id"):
            d["material_id"] = o["snl"]["about"]["_mp_id"]

        # New style
        elif "input_mp_id" in d:
            d["material_id"] = d["input_mp_id"]
        else:
            d["material_id"] = None
        d["relaxation_task_id"] = i

        calc_struct = Structure.from_dict(o["snl_final"])
        # TODO:
        # JHM: This test is unnecessary at the moment, but should be redone
        """
        conventional = is_conventional(calc_struct)
        if conventional:
            d["analysis"]["is_conventional"] = True
        else:
            d["analysis"]["is_conventional"] = False
        """
        d["spacegroup"] = o.get("spacegroup", "Unknown")

        if ndocs >= 20:
            # Perform Elastic tensor fitting and analysis
            result = ElasticTensor.from_stress_dict(ss_dict)
            d["elastic_tensor"] = result.voigt.tolist()
            kg_average = result.kg_average
            d.update({
                "K_Voigt": kg_average[0],
                "G_Voigt": kg_average[1],
                "K_Reuss": kg_average[2],
                "G_Reuss": kg_average[3],
                "K_Voigt_Reuss_Hill": kg_average[4],
                "G_Voigt_Reuss_Hill": kg_average[5]
            })
            d["universal_anisotropy"] = result.universal_anisotropy
            d["homogeneous_poisson"] = result.homogeneous_poisson
            if ndocs < 24:
                d["warning"].append("less than 24 tasks completed")

            # Perform filter checks
            symm_t = result.voigt_symmetrized
            d["symmetrized_tensor"] = symm_t.voigt.tolist()
            d["analysis"]["not_rare_earth"] = True
            for s in calc_struct.species:
                if s.is_rare_earth_metal:
                    d["analysis"]["not_rare_earth"] = False
            eigvals = np.linalg.eigvals(symm_t.voigt)
            eig_positive = np.all((eigvals > 0) & np.isreal(eigvals))
            d["analysis"]["eigval_positive"] = bool(eig_positive)
            c11 = symm_t.voigt[0][0]
            c12 = symm_t.voigt[0][1]
            c13 = symm_t.voigt[0][2]
            c23 = symm_t.voigt[1][2]
            d["analysis"]["c11_c12"] = not (abs(
                (c11 - c12) / c11) < 0.05 or c11 < c12)
            d["analysis"]["c11_c13"] = not (abs(
                (c11 - c13) / c11) < 0.05 or c11 < c13)
            d["analysis"]["c11_c23"] = not (abs(
                (c11 - c23) / c11) < 0.1 or c11 < c23)
            d["analysis"]["K_R"] = not (d["K_Reuss"] < 2)
            d["analysis"]["G_R"] = not (d["G_Reuss"] < 2)
            d["analysis"]["K_V"] = not (d["K_Voigt"] < 2)
            d["analysis"]["G_V"] = not (d["G_Voigt"] < 2)
            filter_state = np.all(list(d["analysis"].values()))
            d["analysis"]["filter_pass"] = bool(filter_state)
            d["analysis"]["eigval"] = list(eigvals)

            # TODO:
            # JHM: eventually we can reintroduce the IEEE conversion
            #       but as of now it's not being used, and it should
            #       be in pymatgen
            """
            # IEEE Conversion
            try:
                ieee_tensor = IEEE_conversion.get_ieee_tensor(struct_final, result)
                d["elastic_tensor_IEEE"] = ieee_tensor[0].tolist()
                d["analysis"]["IEEE"] = True
            except Exception as e:
                d["elastic_tensor_IEEE"] = None
                d["analysis"]["IEEE"] = False
                d["error"].append("Unable to get IEEE tensor: {}".format(e))
            """
            # Add thermal properties
            nsites = calc_struct.num_sites
            volume = calc_struct.volume
            natoms = calc_struct.composition.num_atoms
            weight = calc_struct.composition.weight
            num_density = 1e30 * nsites / volume
            mass_density = 1.6605e3 * nsites * weight / \
                           (natoms * volume)
            tot_mass = sum([e.atomic_mass for e in calc_struct.species])
            avg_mass = 1.6605e-27 * tot_mass / natoms
            # Young's modulus E = 9KG / (3K + G), in Pa
            y_mod = 9e9 * result.k_vrh * result.g_vrh / \
                    (3. * result.k_vrh + result.g_vrh)
            # Transverse and longitudinal sound velocities, in m/s
            trans_v = (1e9 * result.g_vrh / mass_density)**0.5
            long_v = (1e9 * (result.k_vrh + 4./3. * result.g_vrh) /
                      mass_density)**0.5
            clarke = 0.87 * 1.3806e-23 * avg_mass**(-2./3.) * \
                     mass_density**(1./6.) * y_mod**0.5
            cahill = 1.3806e-23 / 2.48 * num_density**(2./3.) * \
                     (long_v + 2 * trans_v)
            snyder_ac = 0.38483 * avg_mass * \
                        (long_v + 2./3.*trans_v)**3. / \
                        (300. * num_density**(-2./3.) * nsites**(1./3.))
            snyder_opt = 1.66914e-23 * (long_v + 2./3.*trans_v) / \
                         num_density**(-2./3.) * \
                         (1 - nsites**(-1./3.))
            snyder_total = snyder_ac + snyder_opt
            debye = 2.489e-11 * avg_mass**(-1./3.) * \
                    mass_density**(-1./6.) * y_mod**0.5

            d["thermal"] = {
                "num_density": num_density,
                "mass_density": mass_density,
                "avg_mass": avg_mass,
                "num_atom_per_unit_formula": natoms,
                "youngs_modulus": y_mod,
                "trans_velocity": trans_v,
                "long_velocity": long_v,
                "clarke": clarke,
                "cahill": cahill,
                "snyder_acou_300K": snyder_ac,
                "snyder_opt": snyder_opt,
                "snyder_total": snyder_total,
                "debye": debye
            }
        else:
            d['state'] = "Fewer than 20 successful tasks completed"
            return FWAction()

        if o["snl"]["about"].get("_kpoints_density"):
            d["kpoint_density"] = o["snl"]["about"].get("_kpoints_density")

        if d["error"]:
            raise ValueError("Elastic analysis failed: {}".format(d["error"]))
        elif d["analysis"]["filter_pass"]:
            d["state"] = "successful"
        else:
            d["state"] = "filter_failed"
        elasticity.update({"relaxation_task_id": d["relaxation_task_id"]},
                          d,
                          upsert=True)
        return FWAction()
Example No. 12
    def run_task(self, fw_spec):
        d = {
            "analysis": {},
            "deformation_tasks": fw_spec["deformation_tasks"],
            "initial_structure": self['structure'].as_dict()
        }

        # Get optimized structure
        calc_locs_opt = [
            cl for cl in fw_spec['calc_locs'] if 'optimize' in cl['name']
        ]
        if calc_locs_opt:
            optimize_loc = calc_locs_opt[-1]['path']
            logger.info("Parsing initial optimization directory: {}".format(
                optimize_loc))
            drone = VaspDrone()
            optimize_doc = drone.assimilate(optimize_loc)
            opt_struct = Structure.from_dict(
                optimize_doc["calcs_reversed"][0]["output"]["structure"])
            d.update({"optimized_structure": opt_struct.as_dict()})

        # TODO: @montoyjh: does the below have anything to do with elastic tensor? If not, try
        # the more general fw_spec_field approach in the VaspToDb rather than hard-coding the
        # tags insertion here. -computron
        if fw_spec.get("tags", None):
            d["tags"] = fw_spec["tags"]

        results = fw_spec["deformation_tasks"].values()
        defos = [r["deformation_matrix"] for r in results]
        stresses = [r["stress"] for r in results]
        strains = np.array([Strain(r["strain"]).voigt for r in results])
        stress_dict = {
            IndependentStrain(defo): Stress(stress)
            for defo, stress in zip(defos, stresses)
        }

        logger.info("Analyzing stress/strain data")
        # Determine if we have 6 unique deformations
        # TODO: @montoyjh: what if it's a cubic system? don't need 6. -computron
        if np.linalg.matrix_rank(strains) == 6:
            # Perform Elastic tensor fitting and analysis
            result = ElasticTensor.from_stress_dict(stress_dict)
            d["elastic_tensor"] = result.voigt.tolist()
            d.update(result.property_dict)

        else:
            raise ValueError("Fewer than 6 unique deformations")

        d["state"] = "successful"

        # Save analysis results in json or db
        db_file = env_chk(self.get('db_file'), fw_spec)
        if not db_file:
            with open("elasticity.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            db.collection = db.db["elasticity"]
            db.collection.insert_one(d)
            logger.info("Elastic analysis complete.")
        return FWAction()
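
The matrix_rank check above is how the task decides whether the collected Voigt strains span all six independent deformation directions before attempting a fit. A small NumPy illustration of that criterion:

import numpy as np

# Six independent Voigt strains (one per component) give full rank
vstrains = 0.01 * np.eye(6)
assert np.linalg.matrix_rank(vstrains) == 6

# Dropping one deformation direction leaves the system rank-deficient
assert np.linalg.matrix_rank(vstrains[:5]) == 5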
Example No. 13
def toec_fit(strains, stresses, eq_stress=None, zero_crit=1e-10):
    """
    A third-order elastic constant fitting function based on 
    central-difference derivatives with respect to distinct
    strain states.  The algorithm is summarized as follows:

    1. Identify distinct strain states as sets of indices 
       for which nonzero strain values exist, typically
       [(0), (1), (2), (3), (4), (5), (0, 1) etc.]
    2. For each strain state, find and sort strains and
       stresses by strain value.
    3. Find first and second derivatives of each stress
       with respect to scalar variable corresponding to
       the smallest perturbation in the strain.
    4. Use the pseudoinverse of a matrix-vector expression 
       corresponding to the parameterized stress-strain
       relationship and multiply that matrix by the respective 
       calculated first or second derivatives from the
       previous step.
    5. Place the calculated second and third-order elastic 
       constants appropriately.

    Args:
        strains (nx3x3 array-like): Array of 3x3 strains
            to use in fitting of TOEC and SOEC
        stresses (nx3x3 array-like): Array of 3x3 stresses
            to use in fitting of TOEC and SOEC.  These
            should be PK2 stresses.
        eq_stress (3x3 array-like): stress corresponding to
            equilibrium strain (i. e. "0" strain state).
            If not specified, function will try to find
            the state in the list of provided stresses
            and strains.  If not found, defaults to 0.
        zero_crit (float): strain entries with magnitude below this
            value are treated as zero when identifying strain states.

    Returns:
        (TensorBase, TensorBase): the fitted second-order and
            third-order elastic tensors in full tensor notation
    """

    if len(stresses) != len(strains):
        raise ValueError("Length of strains and stresses are not equivalent")
    vstresses = np.array([Stress(stress).voigt for stress in stresses])
    vstrains = np.array([Strain(strain).voigt for strain in strains])
    vstrains[np.abs(vstrains) < zero_crit] = 0

    # Try to find eq_stress if not specified
    if eq_stress is not None:
        veq_stress = Stress(eq_stress).voigt
    else:
        veq_stress = vstresses[np.all(vstrains == 0, axis=1)]
        if len(veq_stress) > 0:
            if len(veq_stress) > 1 and not \
               (abs(veq_stress - veq_stress[0]) < 1e-8).all():
                raise ValueError(
                    "Multiple stresses found for equilibrium strain"
                    " state, please specify equilibrium stress or  "
                    " remove extraneous stresses.")
            veq_stress = veq_stress[0]
        else:
            veq_stress = np.zeros(6)

    # Collect independent strain states:
    independent = set(
        [tuple(np.nonzero(vstrain)[0].tolist()) for vstrain in vstrains])

    strain_states = []
    dsde = np.zeros((6, len(independent)))
    d2sde2 = np.zeros((6, len(independent)))
    for n, ind in enumerate(independent):
        # match strains with templates
        template = np.zeros(6, dtype=bool)
        np.put(template, ind, True)
        template = np.tile(template, [vstresses.shape[0], 1])
        mode = (template == (np.abs(vstrains) > 1e-10)).all(axis=1)
        mstresses = vstresses[mode]
        mstrains = vstrains[mode]
        # add zero strain state
        mstrains = np.vstack([mstrains, np.zeros(6)])
        mstresses = np.vstack([mstresses, np.zeros(6)])
        # sort strains/stresses by strain values
        mstresses = mstresses[mstrains[:, ind[0]].argsort()]
        mstrains = mstrains[mstrains[:, ind[0]].argsort()]
        strain_states.append(mstrains[-1] / \
                             np.min(mstrains[-1][np.nonzero(mstrains[0])]))
        diff = np.diff(mstrains, axis=0)
        if not (abs(diff - diff[0]) < 1e-8).all():
            raise ValueError("Stencil for strain state {} must be odd-sampling"
                             " centered at 0.".format(ind))
        h = np.min(diff[np.nonzero(diff)])
        coef1 = central_diff(1, len(mstresses))
        coef2 = central_diff(2, len(mstresses))
        if eq_stress is not None:
            mstresses[3] = veq_stress
        dsde[:, n] = np.dot(np.transpose(mstresses), coef1) / h
        d2sde2[:, n] = np.dot(np.transpose(mstresses), coef2) / h**2

    m2i, m3i = generate_pseudo(strain_states)
    s2vec = np.ravel(dsde.T)
    c2vec = np.dot(m2i, s2vec)
    c2 = np.zeros((6, 6))
    c2[np.triu_indices(6)] = c2vec
    c2 = c2 + c2.T - np.diag(np.diag(c2))
    c3 = np.zeros((6, 6, 6))
    s3vec = np.ravel(d2sde2.T)
    c3vec = np.dot(m3i, s3vec)
    indices = list(itertools.combinations_with_replacement(range(6), r=3))
    for n, (i, j, k) in enumerate(indices):
        c3[i,j,k] = c3[i,k,j] = c3[j,i,k] = c3[j,k,i] = \
                c3[k,i,j] = c3[k,j,i] = c3vec[n]
    return TensorBase.from_voigt(c2), TensorBase.from_voigt(c3)
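
Step 3 of the algorithm estimates the first and second derivatives of each stress component with respect to the strain magnitude at zero strain, using an odd, zero-centered stencil. For the simplest three-point stencil the weights reduce to the familiar central differences, sketched below for a single stress component (central_diff in the function above is assumed to supply the analogous weights for longer stencils):

import numpy as np

h = 0.01                                  # stencil spacing
eps = np.array([-h, 0.0, h])              # odd sampling, centered at zero strain
sigma = np.array([-2.1, 0.0, 2.3])        # one stress component at those strains (illustrative)

first_deriv = (sigma[2] - sigma[0]) / (2 * h)                  # dS/de at e = 0
second_deriv = (sigma[2] - 2 * sigma[1] + sigma[0]) / h ** 2   # d2S/de2 at e = 0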
Example No. 14
 def setUp(self):
     self.rand_stress = Stress(np.random.randn(3, 3))
     self.symm_stress = Stress([[0.51, 2.29, 2.42], [2.29, 5.14, 5.07], [2.42, 5.07, 5.33]])
     self.non_symm = Stress([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6], [0.2, 0.5, 0.5]])
Example No. 15
    def process_item(self, item):
        """
        Process the tasks and materials into an elasticity collection

        Args:
            item (dict): a dict of material_id, structure, and tasks

        Returns:
            [dict]: a list of elasticity documents
        """
        root_mats = [
            mat for mat in item["mats"]
            if mat.get("inputs", {}).get("structure optimization", None)
        ]
        deform_mats = [mat for mat in item["mats"] if mat not in root_mats]
        docs = []

        # TODO: What structure matcher parameters to use?
        # TODO: Should SM parameters be configurable?
        sm = StructureMatcher(primitive_cell=True,
                              scale=True,
                              attempt_supercell=False,
                              allow_subset=False,
                              comparator=ElementComparator())

        for r_mat in root_mats:

            # Enumerate over all deformations
            r_struc = Structure.from_dict(r_mat['initial_structure'])

            defos = []
            stresses = []
            strains = []
            m_ids = []

            for d_mat in deform_mats:
                # Find deformation matrix
                d_struc = Structure.from_dict(d_mat["initial_structure"])
                transform_matrix = np.transpose(
                    np.linalg.solve(r_struc.lattice.matrix,
                                    d_struc.lattice.matrix))
                # apply deformation matrix to root_mat and check if the two structures match
                dfm = Deformation(transform_matrix)
                dfm_struc = dfm.apply_to_structure(r_struc)

                # if the structures match, store the stress and strain
                if sm.fit(dfm_struc, d_struc):
                    # This is a deformation of the root structure
                    defos.append(dfm)
                    stresses.append(d_mat['stress'])
                    strains.append(dfm.green_lagrange_strain)
                    m_ids.append(d_mat['material_id'])

            stress_dict = {
                IndependentStrain(defo): Stress(stress)
                for defo, stress in zip(defos, stresses)
            }

            self.__logger.info("Analyzing stress/strain data")

            # Determine if we have 6 independent deformations (use Voigt strains)
            vstrains = [strain.voigt for strain in strains]
            if np.linalg.matrix_rank(vstrains) == 6:
                # Perform Elastic tensor fitting and analysis
                result = ElasticTensor.from_stress_dict(stress_dict)

                d = {
                    "material_id": r_mat["material_id"],
                    "elasticity": {
                        "elastic_tensor": result.voigt.tolist(),
                        "material_ids": m_ids
                    }
                }

                d["elasticity"].update(result.property_dict)

                docs.append(d)
            else:
                self.__logger.warn(
                    "Fewer than 6 unique deformations for {}".format(
                        r_mat["material_id"]))

        return docs
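
The transform_matrix expression above recovers the deformation gradient F from the two lattices: with lattice vectors stored as rows, deformed = original @ F.T, so np.transpose(np.linalg.solve(original, deformed)) returns F. A quick round-trip check of that linear algebra:

import numpy as np

original = 3.0 * np.eye(3)                       # lattice vectors as rows
F = np.array([[1.01, 0.00, 0.00],
              [0.00, 1.00, 0.02],
              [0.00, 0.00, 1.00]])               # known deformation gradient

deformed = original @ F.T                        # rows transform as b_i = F a_i
recovered = np.transpose(np.linalg.solve(original, deformed))
assert np.allclose(recovered, F)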
Example No. 16
 def setUp(self):
     with open(os.path.join(test_dir, 'test_toec_data.json')) as f:
         self.data_dict = json.load(f)
     self.strains = [Strain(sm) for sm in self.data_dict['strains']]
     self.pk_stresses = [Stress(d) for d in self.data_dict['pk_stresses']]
Example No. 17
 def setUp(self):
     with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "test_toec_data.json")) as f:
         self.data_dict = json.load(f)
     self.strains = [Strain(sm) for sm in self.data_dict["strains"]]
     self.pk_stresses = [Stress(d) for d in self.data_dict["pk_stresses"]]