Example 1
    def __init__(self, dbf, comps, phases, conditions, **kwargs):
        self.conditions = conditions
        self.components = set(comps)
        self.phases = dict()
        self.statevars = dict()
        self.data = pd.DataFrame()

        self._phases = {name: dbf.phases[name] for name in phases}
        self._phase_callables = dict()
        self._gradient_callables = dict()
        self._molefrac_callables = dict()
        self._molefrac_jac_callables = dict()
        self._variables = dict()
        self._sublattice_dof = dict()
        for key in ['T', 'P']:
            try:
                self.statevars[v.StateVariable(key)] = kwargs[key]
            except KeyError:
                pass

        # Construct models for each phase; prioritize user models
        self._models = unpack_kwarg(kwargs.pop('model', Model), \
            default_arg=Model)
        for name in phases:
            mod = self._models[name]
            if isinstance(mod, type):
                # Initialize the model
                self._models[name] = mod(dbf, self.components, name)

        self._build_objective_functions()

        self.data = energy_surf(dbf, comps, phases, model=self._models, \
            **kwargs)

        # self.data now contains energy surface information for the system
        # find simplex for a starting point; refine with optimization
        estimates = self.get_starting_simplex()
        logger.debug(estimates)
        self.result = self.minimize(estimates[0], estimates[1])
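
A minimal usage sketch of a class built around this constructor, assuming the
surrounding class is named Equilibrium and that a Database object `dbf` has
been loaded elsewhere; the class name, components, phases, and conditions
below are illustrative assumptions, not taken from the source:

    # hypothetical driver code, not part of the original example
    conds = {'X(ZN)': 0.3}                        # hypothetical conditions; format depends on the class
    eq = Equilibrium(dbf, ['AL', 'ZN', 'VA'], ['FCC_A1', 'LIQUID'],
                     conds, T=600.0, P=101325.0)
    print(eq.result)                              # refined minimization result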
Example 2
def energy_surf(dbf, comps, phases, mode=None, **kwargs):
    """
    Sample the energy surface of a system containing the specified
    components and phases. Model parameters are taken from 'dbf' and any
    state variables (T, P, etc.) can be specified as keyword arguments.

    Parameters
    ----------
    dbf : Database
        Thermodynamic database containing the relevant parameters.
    comps : list
        Names of components to consider in the calculation.
    phases : list
        Names of phases to consider in the calculation.
    mode : string, optional
        Backend passed through to make_callable when compiling the energy
        expressions for each phase.
    pdens : int, a dict of phase names to int, or a list of both, optional
        Number of points to sample per degree of freedom.
    model : Model class, a dict of phase names to Model, or a list of both, optional
        Model class or instance to use for each phase.
    Returns
    -------
    DataFrame of the energy as a function of composition, temperature, etc.

    Examples
    --------
    None yet.
    """
    # Here we check for any keyword arguments that are special, i.e.,
    # there may be keyword arguments that aren't state variables
    pdens_dict = unpack_kwarg(kwargs.pop('pdens', 2000), default_arg=2000)
    model_dict = unpack_kwarg(kwargs.pop('model', Model), default_arg=Model)
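    # (Clarifying note, not part of the original code: unpack_kwarg is assumed
    # to normalize a scalar, a dict keyed by phase name, or a list mixing both
    # into a per-phase mapping, so pdens_dict[name] and model_dict[name] below
    # are defined for every phase and fall back to default_arg when a phase is
    # not explicitly listed.)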

    # Convert keyword strings to proper state variable objects
    # If we don't do this, sympy will get confused during substitution
    statevar_dict = \
        dict((v.StateVariable(key), value) \
             for (key, value) in kwargs.items())

    # Generate all combinations of state variables for 'map' calculation
    # Wrap single values of state variables in lists
    # Use 'kwargs' because we want state variable names to be stringified
    statevar_values = [_listify(val) for val in kwargs.values()]
    statevars_to_map = [dict(zip(kwargs.keys(), prod)) \
        for prod in itertools.product(*statevar_values)]
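    # (Illustration, not part of the original code: with keyword arguments
    # T=[300.0, 400.0] and P=101325.0, the scalar P is wrapped in a list and
    # itertools.product yields
    #     statevars_to_map = [{'T': 300.0, 'P': 101325.0},
    #                         {'T': 400.0, 'P': 101325.0}]
    # i.e. one dict of state-variable values per point of the 'map'.)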

    # Consider only the active phases
    active_phases = dict((name.upper(), dbf.phases[name.upper()]) \
        for name in phases)
    comp_sets = {}
    # Construct a list to hold all the data
    all_phase_data = []
    for phase_name, phase_obj in sorted(active_phases.items()):
        # Build the symbolic representation of the energy
        mod = model_dict[phase_name]
        # if this is an object type, we need to construct it
        if isinstance(mod, type):
            try:
                mod = mod(dbf, comps, phase_name)
            except DofError:
                # we can't build the specified phase because the
                # specified components aren't found in every sublattice
                # we'll just skip it
                logger.warning("""Suspending specified phase %s due to
                some sublattices containing only unspecified components""",
                               phase_name)
                continue
        # As a last resort, treat undefined symbols as zero
        # But warn the user when we do this
        # This is consistent with TC's behavior
        undefs = list(mod.ast.atoms(Symbol) - mod.ast.atoms(v.StateVariable))
        for undef in undefs:
            mod.ast = mod.ast.xreplace({undef: float(0)})
            logger.warning('Setting undefined symbol %s for phase %s to zero',
                           undef, phase_name)
        # Construct an ordered list of the variables
        variables, sublattice_dof = generate_dof(phase_obj, mod.components)

        # Build the "fast" representation of that model
        comp_sets[phase_name] = make_callable(mod.ast, \
            list(statevar_dict.keys()) + variables, mode=mode)

        # Get the site ratios in each sublattice
        site_ratios = list(phase_obj.sublattices)

        # Eliminate pure vacancy endmembers from the calculation
        vacancy_indices = list()
        for idx, sublattice in enumerate(phase_obj.constituents):
            if 'VA' in sorted(sublattice) and 'VA' in sorted(comps):
                vacancy_indices.append(sorted(sublattice).index('VA'))
        if len(vacancy_indices) != len(phase_obj.constituents):
            vacancy_indices = None
        logger.debug('vacancy_indices: %s', vacancy_indices)
        # Add all endmembers to guarantee their presence
        points = endmember_matrix(sublattice_dof,
                                  vacancy_indices=vacancy_indices)
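        # (Clarifying note, not part of the original code: the endmember matrix
        # contains one row per endmember, i.e. per choice of a single
        # constituent in each sublattice, so every corner of the internal
        # composition space is sampled regardless of the point density.)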

        # Sample composition space for more points
        if sum(sublattice_dof) > len(sublattice_dof):
            points = np.concatenate((points,
                                     point_sample(sublattice_dof,
                                                  pdof=pdens_dict[phase_name])
                                    ))

        # If there are nontrivial sublattices with vacancies in them,
        # generate a set of points where their fraction is zero and renormalize
        for idx, sublattice in enumerate(phase_obj.constituents):
            if 'VA' in set(sublattice) and len(sublattice) > 1:
                var_idx = variables.index(v.SiteFraction(phase_name, idx, 'VA'))
                addtl_pts = np.copy(points)
                # push the vacancy site fraction toward zero on a log scale,
                # ranging from 1e-10 (original fraction 0) up to 1 (original fraction 1)
                addtl_pts[:, var_idx] = np.power(10.0, -10.0*(1.0 - addtl_pts[:, var_idx]))
                # renormalize site fractions
                cur_idx = 0
                for ctx in sublattice_dof:
                    end_idx = cur_idx + ctx
                    addtl_pts[:, cur_idx:end_idx] /= \
                        addtl_pts[:, cur_idx:end_idx].sum(axis=1)[:, None]
                    cur_idx = end_idx
                # add to points matrix
                points = np.concatenate((points, addtl_pts), axis=0)

        data_dict = {'Phase': phase_name}
        # Generate input d.o.f matrix for all state variable combinations
        for statevars in statevars_to_map:
            # Prefill the state variable arguments to the energy function
            energy_func = \
                lambda *args: comp_sets[phase_name](
                    *itertools.chain(list(statevars.values()),
                                     args))
            # Get the stable points and energies for this configuration
            refined_points, energies = \
                refine_energy_surf(points, None, phase_obj, comps,
                                   variables, energy_func, max_iterations=-1)
            # Tag every sampled point with this configuration's state variables
            try:
                data_dict['GM'].extend(energies)
                for statevar in kwargs.keys():
                    data_dict[statevar].extend(
                        list(np.repeat(statevars[statevar],
                                       len(refined_points))))
            except KeyError:
                data_dict['GM'] = list(energies)
                for statevar in kwargs.keys():
                    data_dict[statevar] = \
                        list(np.repeat(statevars[statevar],
                                       len(refined_points)))

            # Map the internal degrees of freedom to global coordinates

            # Normalize site ratios
            # Normalize by the sum of site ratios times a factor
            # related to the site fraction of vacancies
            site_ratio_normalization = np.zeros(len(refined_points))
            for idx, sublattice in enumerate(phase_obj.constituents):
                vacancy_column = np.ones(len(refined_points))
                if 'VA' in set(sublattice):
                    var_idx = variables.index(v.SiteFraction(phase_name, idx, 'VA'))
                    vacancy_column -= refined_points[:, var_idx]
                site_ratio_normalization += site_ratios[idx] * vacancy_column
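            # (Clarifying note, not part of the original code: the loop below
            # applies the standard sublattice-model conversion from site
            # fractions y to mole fractions,
            #     X(comp) = sum_s a_s * y_s(comp) / sum_s a_s * (1 - y_s(VA)),
            # where a_s is the number of sites on sublattice s; the denominator
            # is the site_ratio_normalization accumulated above.)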

            for comp in sorted(comps):
                if comp == 'VA':
                    continue
                avector = [float(cur_var.species == comp) * \
                    site_ratios[cur_var.sublattice_index] for cur_var in variables]
                try:
                    data_dict['X('+comp+')'].extend(list(np.divide(np.dot(
                        refined_points[:, :], avector), site_ratio_normalization)))
                except KeyError:
                    data_dict['X('+comp+')'] = list(np.divide(np.dot(
                        refined_points[:, :], avector), site_ratio_normalization))

            # Copy coordinate information into data_dict
            # TODO: Is there a more memory-efficient way to deal with this?
            # Perhaps with hierarchical indexing...
            try:
                for column_idx, data in enumerate(refined_points.T):
                    data_dict[str(variables[column_idx])].extend(list(data))
            except KeyError:
                for column_idx, data in enumerate(refined_points.T):
                    data_dict[str(variables[column_idx])] = list(data)

        all_phase_data.append(pd.DataFrame(data_dict))

    # all_phase_data now contains energy surface information for the system
    return pd.concat(all_phase_data, axis=0, join='outer', \
                            ignore_index=True, verify_integrity=False)
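
A minimal usage sketch, assuming a Database object `dbf` has been loaded
elsewhere; the component names, phase names, and state-variable values below
are illustrative assumptions, not taken from the source:

    # hypothetical driver code, not part of the original example
    df = energy_surf(dbf, ['AL', 'ZN', 'VA'], ['FCC_A1', 'LIQUID'],
                     T=[500.0, 600.0], P=101325.0, pdens=500)
    print(df[['Phase', 'T', 'P', 'GM', 'X(ZN)']].head())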