Example 1
def test_missing_phase_records_passed_to_equilibrium_raises():
    "equilibrium should raise an error if all the active phases are not included in the phase_records"
    my_phases = ['LIQUID', 'FCC_A1']
    subset_phases = ['FCC_A1']
    comps = ['AL', 'FE', 'VA']
    conds = {v.T: 1400, v.P: 101325, v.N: 1.0, v.X('AL'): 0.55}

    models = instantiate_models(ALFE_DBF, comps, my_phases)
    phase_records = build_phase_records(ALFE_DBF, comps, my_phases, conds, models)

    models_subset = instantiate_models(ALFE_DBF, comps, subset_phases)
    phase_records_subset = build_phase_records(ALFE_DBF, comps, subset_phases, conds, models_subset)

    # Under-specified models
    with pytest.raises(ValueError):
        equilibrium(ALFE_DBF, comps, my_phases, conds, verbose=True, model=models_subset, phase_records=phase_records)

    # Under-specified phase_records
    with pytest.raises(ValueError):
        equilibrium(ALFE_DBF, comps, my_phases, conds, verbose=True, model=models, phase_records=phase_records_subset)
Example 2
def test_phase_records_passed_to_equilibrium():
    "Pre-built phase records can be passed to equilibrium."
    my_phases = ['LIQUID', 'FCC_A1', 'HCP_A3', 'AL5FE2', 'AL2FE', 'AL13FE4', 'AL5FE4']
    comps = ['AL', 'FE', 'VA']
    conds = {v.T: 1400, v.P: 101325, v.N: 1.0, v.X('AL'): 0.55}

    models = instantiate_models(ALFE_DBF, comps, my_phases)
    phase_records = build_phase_records(ALFE_DBF, comps, my_phases, conds, models)

    # With models passed
    eqx = equilibrium(ALFE_DBF, comps, my_phases, conds, verbose=True, model=models, phase_records=phase_records)
    assert_allclose(eqx.GM.values.flat[0], -9.608807e4)
Example 3
def test_missing_models_with_phase_records_passed_to_equilibrium_raises():
    "equilibrium should raise an error if all the active phases are not included in the phase_records"
    my_phases = ['LIQUID', 'FCC_A1', 'HCP_A3', 'AL5FE2', 'AL2FE', 'AL13FE4', 'AL5FE4']
    comps = ['AL', 'FE', 'VA']
    conds = {v.T: 1400, v.P: 101325, v.N: 1.0, v.X('AL'): 0.55}

    models = instantiate_models(ALFE_DBF, comps, my_phases)
    phase_records = build_phase_records(ALFE_DBF, comps, my_phases, conds, models)

    with pytest.raises(ValueError):
        # model=models NOT passed
        equilibrium(ALFE_DBF, comps, my_phases, conds, verbose=True, phase_records=phase_records)
Example 4
def test_phase_records_are_picklable():
    dof = np.array([300, 1.0])

    mod = Model(ALNIPT_DBF, ['AL'], 'LIQUID')
    prxs = build_phase_records(ALNIPT_DBF, [v.Species('AL')], ['LIQUID'], {v.T: 300}, {'LIQUID': mod}, build_gradients=True, build_hessians=True)
    prx_liquid = prxs['LIQUID']

    out = np.array([0.0])
    prx_liquid.obj(out, dof)

    prx_loaded = pickle.loads(pickle.dumps(prx_liquid))
    out_unpickled = np.array([0.0])
    prx_loaded.obj(out_unpickled, dof)

    assert np.isclose(out_unpickled[0], -1037.653911)
    assert np.all(out == out_unpickled)
Example 5
def get_thermochemical_data(dbf,
                            comps,
                            phases,
                            datasets,
                            weight_dict=None,
                            symbols_to_fit=None):
    """

    Parameters
    ----------
    dbf : pycalphad.Database
        Database to consider
    comps : list
        List of active component names
    phases : list
        List of phases to consider
    datasets : espei.utils.PickleableTinyDB
        Datasets that contain single phase data
    weight_dict : dict
        Dictionary of weights for each data type, e.g. {'HM': 200, 'SM': 2}
    symbols_to_fit : list
        Parameters to fit. Used to build the models and PhaseRecords.

    Returns
    -------
    list
        List of data dictionaries to iterate over
    """
    # phase by phase, then property by property, then by model exclusions
    if weight_dict is None:
        weight_dict = {}

    if symbols_to_fit is not None:
        symbols_to_fit = sorted(symbols_to_fit)
    else:
        symbols_to_fit = database_symbols_to_fit(dbf)

    # estimated from NIST TRC uncertainties
    property_std_deviation = {
        'HM': 500.0 / weight_dict.get('HM', 1.0),  # J/mol
        'SM': 0.2 / weight_dict.get('SM', 1.0),  # J/K-mol
        'CPM': 0.2 / weight_dict.get('CPM', 1.0),  # J/K-mol
    }
    properties = [
        'HM_FORM', 'SM_FORM', 'CPM_FORM', 'HM_MIX', 'SM_MIX', 'CPM_MIX'
    ]

    ref_states = []
    for el in get_pure_elements(dbf, comps):
        ref_state = ReferenceState(el, dbf.refstates[el]['phase'])
        ref_states.append(ref_state)
    all_data_dicts = []
    for phase_name in phases:
        for prop in properties:
            desired_data = get_prop_data(
                comps,
                phase_name,
                prop,
                datasets,
                additional_query=(where('solver').exists()))
            if len(desired_data) == 0:
                continue
            unique_exclusions = set([
                tuple(sorted(d.get('excluded_model_contributions', [])))
                for d in desired_data
            ])
            for exclusion in unique_exclusions:
                data_dict = {
                    'phase_name': phase_name,
                    'prop': prop,
                    # needs the following keys to be added:
                    # species, calculate_dict, phase_records, model, output, weights
                }
                # get all the data with these model exclusions
                if exclusion == tuple([]):
                    exc_search = (
                        ~where('excluded_model_contributions').exists()) & (
                            where('solver').exists())
                else:
                    exc_search = (where('excluded_model_contributions').test(
                        lambda x: tuple(sorted(x)) == exclusion)) & (
                            where('solver').exists())
                curr_data = get_prop_data(comps,
                                          phase_name,
                                          prop,
                                          datasets,
                                          additional_query=exc_search)
                calculate_dict = get_prop_samples(dbf, comps, phase_name,
                                                  curr_data)
                mod = Model(dbf, comps, phase_name, parameters=symbols_to_fit)
                if prop.endswith('_FORM'):
                    output = ''.join(prop.split('_')[:-1]) + 'R'
                    mod.shift_reference_state(
                        ref_states,
                        dbf,
                        contrib_mods={e: sympy.S.Zero
                                      for e in exclusion})
                else:
                    output = prop
                for contrib in exclusion:
                    mod.models[contrib] = sympy.S.Zero
                    mod.reference_model.models[contrib] = sympy.S.Zero
                species = sorted(unpack_components(dbf, comps), key=str)
                data_dict['species'] = species
                model = {phase_name: mod}
                statevar_dict = {
                    getattr(v, c, None): vals
                    for c, vals in calculate_dict.items()
                    if isinstance(getattr(v, c, None), v.StateVariable)
                }
                statevar_dict = OrderedDict(
                    sorted(statevar_dict.items(), key=lambda x: str(x[0])))
                str_statevar_dict = OrderedDict(
                    (str(k), vals) for k, vals in statevar_dict.items())
                phase_records = build_phase_records(
                    dbf,
                    species, [phase_name],
                    statevar_dict,
                    model,
                    output=output,
                    parameters={s: 0
                                for s in symbols_to_fit},
                    build_gradients=False,
                    build_hessians=False)
                data_dict['str_statevar_dict'] = str_statevar_dict
                data_dict['phase_records'] = phase_records
                data_dict['calculate_dict'] = calculate_dict
                data_dict['model'] = model
                data_dict['output'] = output
                data_dict['weights'] = np.array(
                    property_std_deviation[prop.split('_')[0]]) / np.array(
                        calculate_dict.pop('weights'))
                all_data_dicts.append(data_dict)
    return all_data_dicts
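
A minimal usage sketch for get_thermochemical_data (not part of the original source). The TDB file name, dataset directory, and import paths below are assumptions that may need adjusting for your ESPEI version.

from pycalphad import Database
from espei.datasets import load_datasets, recursive_glob  # assumed import path
from espei.error_functions import get_thermochemical_data  # assumed import path

dbf = Database('Al-Fe.tdb')  # assumed database file
datasets = load_datasets(recursive_glob('input-datasets', '*.json'))  # assumed directory
comps = ['AL', 'FE', 'VA']
phases = ['LIQUID', 'FCC_A1', 'HCP_A3']

# One data dictionary per (phase, property, model-exclusion) combination, each
# carrying the species, models, phase records, and weights used downstream.
thermochemical_data = get_thermochemical_data(
    dbf, comps, phases, datasets, weight_dict={'HM': 200, 'SM': 2})
for d in thermochemical_data:
    print(d['phase_name'], d['prop'], d['output'])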
Example 6
def get_zpf_data(dbf: Database,
                 comps: Sequence[str],
                 phases: Sequence[str],
                 datasets: PickleableTinyDB,
                 parameters: Dict[str, float],
                 model: Optional[Dict[str, Type[Model]]] = None):
    """
    Return the ZPF data used in the calculation of ZPF error

    Parameters
    ----------
    dbf : Database
        Thermodynamic database containing the relevant parameters
    comps : list
        List of active component names
    phases : list
        List of phases to consider
    datasets : espei.utils.PickleableTinyDB
        Datasets that contain ZPF (phase equilibria) data
    parameters : dict
        Dictionary mapping symbols to optimize to their initial values
    model : Optional[Dict[str, Type[Model]]]
        Dictionary mapping phase names to pycalphad Model classes.

    Returns
    -------
    list
        List of data dictionaries with keys ``weight``, ``phase_regions`` and ``dataset_reference``.
    """
    desired_data = datasets.search(
        (tinydb.where('output') == 'ZPF')
        & (tinydb.where('components').test(lambda x: set(x).issubset(comps)))
        & (tinydb.where('phases').test(
            lambda x: len(set(phases).intersection(x)) > 0)))

    zpf_data = []  # 1:1 correspondence with each dataset
    for data in desired_data:
        data_comps = list(set(data['components']).union({'VA'}))
        species = sorted(unpack_components(dbf, data_comps), key=str)
        data_phases = filter_phases(dbf, species, candidate_phases=phases)
        models = instantiate_models(dbf,
                                    species,
                                    data_phases,
                                    model=model,
                                    parameters=parameters)
        # assumed N, P, T state variables
        phase_recs = build_phase_records(dbf,
                                         species,
                                         data_phases, {v.N, v.P, v.T},
                                         models,
                                         parameters=parameters,
                                         build_gradients=True,
                                         build_hessians=True)
        all_phase_points = {
            phase_name: _sample_phase_constitution(models[phase_name],
                                                   point_sample, True, 50)
            for phase_name in data_phases
        }
        all_regions = data['values']
        conditions = data['conditions']
        phase_regions = []
        # Each phase_region is one set of phases in equilibrium (on a tie-line),
        # e.g. [["ALPHA", ["B"], [0.25]], ["BETA", ["B"], [0.5]]]
        for idx, phase_region in enumerate(all_regions):
            # Extract the conditions for entire phase region
            pot_conds = _extract_pot_conds(conditions, idx)
            pot_conds.setdefault(v.N, 1.0)  # Add v.N condition, if missing
            # Extract all the phases and compositions from the tie-line points
            vertices = []
            for vertex in phase_region:
                phase_name, comp_conds, disordered_flag = _extract_phases_comps(
                    vertex)
                # Construct single-phase points satisfying the conditions for each phase in the region
                mod = models[phase_name]
                composition = _compute_vertex_composition(
                    data_comps, comp_conds)
                if np.any(np.isnan(composition)):
                    # We can't construct points because we don't have a known composition
                    has_missing_comp_cond = True
                    phase_points = None
                elif _phase_is_stoichiometric(mod):
                    has_missing_comp_cond = False
                    phase_points = None
                else:
                    has_missing_comp_cond = False
                    # Only sample points that have an average mass residual within tol
                    tol = 0.02
                    phase_points = _subsample_phase_points(
                        phase_recs[phase_name], all_phase_points[phase_name],
                        composition, tol)
                    assert phase_points.shape[
                        0] > 0, f"phase {phase_name} must have at least one set of points within the target tolerance {pot_conds} {comp_conds}"
                vtx = RegionVertex(phase_name, composition, comp_conds,
                                   phase_points, phase_recs, disordered_flag,
                                   has_missing_comp_cond)
                vertices.append(vtx)
            region = PhaseRegion(vertices, pot_conds, species, data_phases,
                                 models)
            phase_regions.append(region)

        data_dict = {
            'weight': data.get('weight', 1.0),
            'phase_regions': phase_regions,
            'dataset_reference': data['reference']
        }
        zpf_data.append(data_dict)
    return zpf_data
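
A minimal usage sketch for get_zpf_data (not part of the original source); file names and import paths are assumptions.

from pycalphad import Database
from espei.datasets import load_datasets, recursive_glob  # assumed import path
from espei.error_functions.zpf_error import get_zpf_data  # assumed import path

dbf = Database('Al-Fe.tdb')  # assumed database file
datasets = load_datasets(recursive_glob('input-datasets', '*.json'))  # assumed directory
zpf_data = get_zpf_data(dbf, ['AL', 'FE', 'VA'], ['LIQUID', 'FCC_A1', 'HCP_A3'],
                        datasets, parameters={})
for d in zpf_data:
    print(d['dataset_reference'], d['weight'], len(d['phase_regions']))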
Example 7
def map_binary(
    dbf,
    comps,
    phases,
    conds,
    eq_kwargs=None,
    calc_kwargs=None,
    boundary_sets=None,
    verbose=False,
    summary=False,
):
    """
    Map a binary T-X phase diagram

    Parameters
    ----------
    dbf : Database
    comps : list of str
    phases : list of str
        List of phases to consider in mapping
    conds : dict
        Dictionary of conditions
    eq_kwargs : dict
        Dictionary of keyword arguments to pass to equilibrium
    calc_kwargs : dict
        Dictionary of keyword arguments to pass to calculate
    boundary_sets : ZPFBoundarySets
        Existing ZPFBoundarySets to add mapped boundaries to
    verbose : bool
        Print verbose output for mapping
    summary : bool
        Print a summary of mapping statistics when finished

    Returns
    -------
    ZPFBoundarySets

    Notes
    -----
    Assumes conditions in T and X.

    Simple algorithm to map a binary phase diagram in T-X. More or less follows
    the algorithm described in Figure 2 by Snider et al. [1] with the small
    algorithmic improvement of constructing a convex hull to find the next
    potential two-phase region.

    For each temperature, proceed along increasing composition, skipping over
    two-phase regions once they have been calculated.

    [1] J. Snider, I. Griva, X. Sun, M. Emelianenko, Set based framework for
        Gibbs energy minimization, Calphad 48 (2015) 18-26.
        doi: 10.1016/j.calphad.2014.09.005

    """

    eq_kwargs = eq_kwargs or {}
    calc_kwargs = calc_kwargs or {}
    # implicitly add v.N to conditions
    if v.N not in conds:
        conds[v.N] = [1.0]
    if 'pdens' not in calc_kwargs:
        calc_kwargs['pdens'] = 2000

    species = unpack_components(dbf, comps)
    phases = filter_phases(dbf, species, phases)
    parameters = eq_kwargs.get('parameters', {})
    models = eq_kwargs.get('model')
    statevars = get_state_variables(models=models, conds=conds)
    if models is None:
        models = instantiate_models(dbf,
                                    comps,
                                    phases,
                                    model=eq_kwargs.get('model'),
                                    parameters=parameters,
                                    symbols_only=True)
    prxs = build_phase_records(dbf,
                               species,
                               phases,
                               conds,
                               models,
                               output='GM',
                               parameters=parameters,
                               build_gradients=True,
                               build_hessians=True)

    indep_comp = [
        key for key, value in conds.items()
        if isinstance(key, v.MoleFraction) and len(np.atleast_1d(value)) > 1
    ]
    indep_pot = [
        key for key, value in conds.items()
        if (type(key) is v.StateVariable) and len(np.atleast_1d(value)) > 1
    ]
    if (len(indep_comp) != 1) or (len(indep_pot) != 1):
        raise ValueError(
            'Binary map requires exactly one composition and one potential coordinate'
        )
    if indep_pot[0] != v.T:
        raise ValueError(
            'Binary map requires that a temperature grid be defined')

    # binary assumption, only one composition specified.
    comp_cond = [k for k in conds.keys() if isinstance(k, v.X)][0]
    indep_comp = comp_cond.name[2:]
    indep_comp_idx = sorted(get_pure_elements(dbf, comps)).index(indep_comp)
    composition_grid = unpack_condition(conds[comp_cond])
    dX = composition_grid[1] - composition_grid[0]
    Xmax = composition_grid.max()
    temperature_grid = unpack_condition(conds[v.T])
    dT = temperature_grid[1] - temperature_grid[0]

    boundary_sets = boundary_sets or ZPFBoundarySets(comps, comp_cond)

    equilibria_calculated = 0
    equilibrium_time = 0
    convex_hulls_calculated = 0
    convex_hull_time = 0
    curr_conds = {key: unpack_condition(val) for key, val in conds.items()}
    str_conds = sorted([str(k) for k in curr_conds.keys()])
    grid_conds = _adjust_conditions(curr_conds)
    for T_idx in range(temperature_grid.size):
        T = temperature_grid[T_idx]
        iter_equilibria = 0
        if verbose:
            print("=== T = {} ===".format(float(T)))
        curr_conds[v.T] = [float(T)]
        eq_conds = deepcopy(curr_conds)
        Xmax_visited = 0.0
        hull_time = time.time()
        grid = calculate(dbf,
                         comps,
                         phases,
                         fake_points=True,
                         output='GM',
                         T=T,
                         P=grid_conds[v.P],
                         N=1,
                         model=models,
                         parameters=parameters,
                         to_xarray=False,
                         **calc_kwargs)
        hull = starting_point(eq_conds, statevars, prxs, grid)
        convex_hull_time += time.time() - hull_time
        convex_hulls_calculated += 1
        while Xmax_visited < Xmax:
            hull_compsets = find_two_phase_region_compsets(
                hull,
                T,
                indep_comp,
                indep_comp_idx,
                minimum_composition=Xmax_visited,
                misc_gap_tol=2 * dX)
            if hull_compsets is None:
                if verbose:
                    print(
                        "== Convex hull: max visited = {} - no multi-phase compsets found =="
                        .format(Xmax_visited))
                break
            Xeq = hull_compsets.mean_composition
            eq_conds[comp_cond] = [float(Xeq)]
            eq_time = time.time()
            start_point = starting_point(eq_conds, statevars, prxs, grid)
            eq_ds = _solve_eq_at_conditions(species, start_point, prxs, grid,
                                            str_conds, statevars, False)
            equilibrium_time += time.time() - eq_time
            equilibria_calculated += 1
            iter_equilibria += 1
            # composition sets in the plane of the calculation:
            # even for isopleths, this should always be two.
            compsets = get_compsets(eq_ds, indep_comp, indep_comp_idx)
            if verbose:
                print(
                    "== Convex hull: max visited = {:0.4f} - hull compsets: {} equilibrium compsets: {} =="
                    .format(Xmax_visited, hull_compsets, compsets))
            if compsets is None:
                # equilibrium calculation, didn't find a valid multiphase composition set
                # we need to find the next feasible one from the convex hull.
                Xmax_visited += dX
                continue
            else:
                boundary_sets.add_compsets(compsets, Xtol=0.10, Ttol=2 * dT)
                if compsets.max_composition > Xmax_visited:
                    Xmax_visited = compsets.max_composition
            # this seems kind of sloppy, but captures the effect that we want to
            # keep doing equilibrium calculations, if possible.
            while Xmax_visited < Xmax and compsets is not None:
                eq_conds[comp_cond] = [float(Xmax_visited + dX)]
                eq_time = time.time()
                # TODO: starting point could be improved by basing it off the previous calculation
                start_point = starting_point(eq_conds, statevars, prxs, grid)
                eq_ds = _solve_eq_at_conditions(species, start_point, prxs,
                                                grid, str_conds, statevars,
                                                False)
                equilibrium_time += time.time() - eq_time
                equilibria_calculated += 1
                compsets = get_compsets(eq_ds, indep_comp, indep_comp_idx)
                if compsets is not None:
                    Xmax_visited = compsets.max_composition
                    boundary_sets.add_compsets(compsets,
                                               Xtol=0.10,
                                               Ttol=2 * dT)
                else:
                    Xmax_visited += dX
                if verbose:
                    print("Equilibrium: at X = {:0.4f}, found compsets {}".
                          format(Xmax_visited, compsets))
        if verbose:
            print(iter_equilibria, 'equilibria calculated in this iteration.')
    if verbose or summary:
        print("{} Convex hulls calculated ({:0.1f}s)".format(
            convex_hulls_calculated, convex_hull_time))
        print("{} Equilbria calculated ({:0.1f}s)".format(
            equilibria_calculated, equilibrium_time))
        print("{:0.0f}% of brute force calculations skipped".format(
            100 * (1 - equilibria_calculated /
                   (composition_grid.size * temperature_grid.size))))
    return boundary_sets
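
A minimal usage sketch for map_binary (not part of the original source); the TDB file name and the import path are assumptions. Conditions use (start, stop, step) tuples, which unpack_condition expands into grids.

from pycalphad import Database, variables as v
from espei.mapping import map_binary  # assumed import path

dbf = Database('Al-Fe.tdb')  # assumed database file
conds = {v.P: 101325, v.T: (800, 2000, 10), v.X('AL'): (0.01, 1.0, 0.01)}
boundaries = map_binary(dbf, ['AL', 'FE', 'VA'], list(dbf.phases.keys()), conds,
                        verbose=False, summary=True)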
Example 8
def build_eqpropdata(
        data: tinydb.database.Document,
        dbf: Database,
        parameters: Optional[Dict[str, float]] = None,
        data_weight_dict: Optional[Dict[str, float]] = None) -> EqPropData:
    """
    Build EqPropData for the calculations corresponding to a single dataset.

    Parameters
    ----------
    data : tinydb.database.Document
        Document corresponding to a single ESPEI dataset.
    dbf : Database
        Database that should be used to construct the `Model` and `PhaseRecord` objects.
    parameters : Optional[Dict[str, float]]
        Mapping of parameter symbols to values.
    data_weight_dict : Optional[Dict[str, float]]
        Mapping of a data type (e.g. `HM` or `SM`) to a weight.

    Returns
    -------
    EqPropData
    """
    parameters = parameters if parameters is not None else {}
    data_weight_dict = data_weight_dict if data_weight_dict is not None else {}
    property_std_deviation = {
        'HM': 500.0,  # J/mol
        'SM': 0.2,  # J/K-mol
        'CPM': 0.2,  # J/K-mol
    }

    params_keys, _ = extract_parameters(parameters)

    data_comps = list(set(data['components']).union({'VA'}))
    species = sorted(unpack_components(dbf, data_comps), key=str)
    data_phases = filter_phases(dbf, species, candidate_phases=data['phases'])
    models = instantiate_models(dbf,
                                species,
                                data_phases,
                                parameters=parameters)
    output = data['output']
    property_output = output.split('_')[
        0]  # property without _FORM, _MIX, etc.
    samples = np.array(data['values']).flatten()
    reference = data.get('reference', '')

    # Models are now modified in response to the data in this dataset
    if 'reference_states' in data:
        property_output = output[:-1] if output.endswith(
            'R'
        ) else output  # unreferenced model property so we can tell shift_reference_state what to build.
        reference_states = []
        for el, vals in data['reference_states'].items():
            reference_states.append(
                ReferenceState(
                    v.Species(el),
                    vals['phase'],
                    fixed_statevars=vals.get('fixed_state_variables')))
        for mod in models.values():
            mod.shift_reference_state(reference_states,
                                      dbf,
                                      output=(property_output, ))

    data['conditions'].setdefault(
        'N', 1.0
    )  # Add default for N. Nothing else is supported in pycalphad anyway.
    pot_conds = OrderedDict([(getattr(v, key),
                              unpack_condition(data['conditions'][key]))
                             for key in sorted(data['conditions'].keys())
                             if not key.startswith('X_')])
    comp_conds = OrderedDict([(v.X(key[2:]),
                               unpack_condition(data['conditions'][key]))
                              for key in sorted(data['conditions'].keys())
                              if key.startswith('X_')])

    phase_records = build_phase_records(dbf,
                                        species,
                                        data_phases, {
                                            **pot_conds,
                                            **comp_conds
                                        },
                                        models,
                                        parameters=parameters,
                                        build_gradients=True,
                                        build_hessians=True)

    # Now we need to unravel the composition conditions
    # (from Dict[v.X, Sequence[float]] to Sequence[Dict[v.X, float]]), since the
    # composition conditions are only broadcast against the potentials, not
    # each other. Each individual composition needs to be computed
    # independently, since broadcasting over composition cannot be turned off
    # in pycalphad.
    rav_comp_conds = [
        OrderedDict(zip(comp_conds.keys(), pt_comps))
        for pt_comps in zip(*comp_conds.values())
    ]

    # Build weights, should be the same size as the values
    total_num_calculations = len(rav_comp_conds) * np.prod(
        [len(vals) for vals in pot_conds.values()])
    dataset_weights = np.array(data.get('weight',
                                        1.0)) * np.ones(total_num_calculations)
    weights = (property_std_deviation.get(property_output, 1.0) /
               data_weight_dict.get(property_output, 1.0) /
               dataset_weights).flatten()

    return EqPropData(dbf, species, data_phases, pot_conds, rav_comp_conds,
                      models, params_keys, phase_records, output, samples,
                      weights, reference)
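
A minimal usage sketch for build_eqpropdata (not part of the original source); the query, file names, and import paths are assumptions.

import tinydb
from pycalphad import Database
from espei.datasets import load_datasets, recursive_glob  # assumed import path
from espei.error_functions.equilibrium_thermochemical_error import build_eqpropdata  # assumed import path

dbf = Database('Al-Fe.tdb')  # assumed database file
datasets = load_datasets(recursive_glob('input-datasets', '*.json'))  # assumed directory
# Build an EqPropData for each equilibrium mixing-enthalpy dataset, for example
eq_prop_data = [build_eqpropdata(record, dbf, data_weight_dict={'HM': 10.0})
                for record in datasets.search(tinydb.where('output') == 'HM_MIX')]
print(len(eq_prop_data), 'equilibrium property data objects built')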
Example 9
def calculate(dbf, comps, phases, mode=None, output='GM', fake_points=False, broadcast=True, parameters=None, **kwargs):
    """
    Sample the property surface of 'output' containing the specified
    components and phases. Model parameters are taken from 'dbf' and any
    state variables (T, P, etc.) can be specified as keyword arguments.

    Parameters
    ----------
    dbf : Database
        Thermodynamic database containing the relevant parameters.
    comps : str or sequence
        Names of components to consider in the calculation.
    phases : str or sequence
        Names of phases to consider in the calculation.
    mode : string, optional
        See 'make_callable' docstring for details.
    output : string, optional
        Model attribute to sample.
    fake_points : bool, optional (Default: False)
        If True, the first few points of the output surface will be fictitious
        points used to define an equilibrium hyperplane guaranteed to be above
        all the other points. This is used for convex hull computations.
    broadcast : bool, optional
        If True, broadcast given state variable lists against each other to create a grid.
        If False, assume state variables are given as equal-length lists.
    points : ndarray or a dict of phase names to ndarray, optional
        Columns of ndarrays must be internal degrees of freedom (site fractions), sorted.
        If this is not specified, points will be generated automatically.
    pdens : int, a dict of phase names to int, or a seq of both, optional
        Number of points to sample per degree of freedom.
        Default: 2000; Default when called from equilibrium(): 500
    model : Model, a dict of phase names to Model, or a seq of both, optional
        Model class to use for each phase.
    sampler : callable, a dict of phase names to callable, or a seq of both, optional
        Function to sample phase constitution space.
        Must have same signature as 'pycalphad.core.utils.point_sample'
    grid_points : bool, a dict of phase names to bool, or a seq of both, optional (Default: True)
        Whether to add evenly spaced points between end-members.
        The density of points is determined by 'pdens'
    parameters : dict, optional
        Maps SymPy Symbol to numbers, for overriding the values of parameters in the Database.

    Returns
    -------
    Dataset of the sampled attribute as a function of state variables

    Examples
    --------
    None yet.
    """
    # Here we check for any keyword arguments that are special, i.e.,
    # there may be keyword arguments that aren't state variables
    pdens_dict = unpack_kwarg(kwargs.pop('pdens', 2000), default_arg=2000)
    points_dict = unpack_kwarg(kwargs.pop('points', None), default_arg=None)
    callables = kwargs.pop('callables', {})
    sampler_dict = unpack_kwarg(kwargs.pop('sampler', None), default_arg=None)
    fixedgrid_dict = unpack_kwarg(kwargs.pop('grid_points', True), default_arg=True)
    parameters = parameters or dict()
    if isinstance(parameters, dict):
        parameters = OrderedDict(sorted(parameters.items(), key=str))
    if isinstance(phases, str):
        phases = [phases]
    if isinstance(comps, (str, v.Species)):
        comps = [comps]
    comps = sorted(unpack_components(dbf, comps))
    if points_dict is None and broadcast is False:
        raise ValueError('The \'points\' keyword argument must be specified if broadcast=False is also given.')
    nonvacant_components = [x for x in sorted(comps) if x.number_of_atoms > 0]

    all_phase_data = []
    largest_energy = 1e10

    # Consider only the active phases
    list_of_possible_phases = filter_phases(dbf, comps)
    active_phases = sorted(set(list_of_possible_phases).intersection(set(phases)))
    active_phases = {name: dbf.phases[name] for name in active_phases}
    if len(list_of_possible_phases) == 0:
        raise ConditionError('There are no phases in the Database that can be active with components {0}'.format(comps))
    if len(active_phases) == 0:
        raise ConditionError('None of the passed phases ({0}) are active. List of possible phases: {1}.'
                             .format(phases, list_of_possible_phases))

    models = instantiate_models(dbf, comps, list(active_phases.keys()), model=kwargs.pop('model', None), parameters=parameters)

    if isinstance(output, (list, tuple, set)):
        raise NotImplementedError('Only one property can be specified in calculate() at a time')
    output = output if output is not None else 'GM'

    # Implicitly add 'N' state variable as a string to keyword arguments if it's not passed
    if kwargs.get('N') is None:
        kwargs['N'] = 1
    if np.any(np.array(kwargs['N']) != 1):
        raise ConditionError('N!=1 is not yet supported, got N={}'.format(kwargs['N']))

    # TODO: conditions dict of StateVariable instances should become part of the calculate API
    statevar_strings = [sv for sv in kwargs.keys() if getattr(v, sv) is not None]
    # If we don't do this, sympy will get confused during substitution
    statevar_dict = dict((v.StateVariable(key), unpack_condition(value)) for key, value in kwargs.items() if key in statevar_strings)
    # Sort after default state variable check to fix gh-116
    statevar_dict = collections.OrderedDict(sorted(statevar_dict.items(), key=lambda x: str(x[0])))
    phase_records = build_phase_records(dbf, comps, active_phases, statevar_dict,
                                   models=models, parameters=parameters,
                                   output=output, callables=callables,
                                   verbose=kwargs.pop('verbose', False))
    str_statevar_dict = collections.OrderedDict((str(key), unpack_condition(value)) \
                                                for (key, value) in statevar_dict.items())
    maximum_internal_dof = max(len(models[phase_name].site_fractions) for phase_name in active_phases)
    for phase_name, phase_obj in sorted(active_phases.items()):
        mod = models[phase_name]
        phase_record = phase_records[phase_name]
        points = points_dict[phase_name]
        variables, sublattice_dof = generate_dof(phase_obj, mod.components)
        if points is None:
            points = _sample_phase_constitution(phase_name, phase_obj.constituents, sublattice_dof, comps,
                                                tuple(variables), sampler_dict[phase_name] or point_sample,
                                                fixedgrid_dict[phase_name], pdens_dict[phase_name])
        points = np.atleast_2d(points)

        fp = fake_points and (phase_name == sorted(active_phases.keys())[0])
        phase_ds = _compute_phase_values(nonvacant_components, str_statevar_dict,
                                         points, phase_record, output,
                                         maximum_internal_dof, broadcast=broadcast,
                                         largest_energy=float(largest_energy), fake_points=fp)
        all_phase_data.append(phase_ds)

    # speedup for single-phase case (found by profiling)
    if len(all_phase_data) > 1:
        final_ds = concat(all_phase_data, dim='points')
        final_ds['points'].values = np.arange(len(final_ds['points']))
        final_ds.coords['points'].values = np.arange(len(final_ds['points']))
    else:
        final_ds = all_phase_data[0]
    return final_ds
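
A brief usage sketch for calculate (not part of the original source); the TDB file name is an assumption.

from pycalphad import Database, calculate

dbf = Database('Al-Fe.tdb')  # assumed database file
# Sample the molar Gibbs energy of two phases on a temperature grid at 1 atm
res = calculate(dbf, ['AL', 'FE', 'VA'], ['LIQUID', 'FCC_A1'],
                T=(300, 2000, 50), P=101325, output='GM', pdens=500)
print(res.GM.dims, res.GM.shape)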
Example 10
def calculate(dbf,
              comps,
              phases,
              mode=None,
              output='GM',
              fake_points=False,
              broadcast=True,
              parameters=None,
              to_xarray=True,
              **kwargs):
    """
    Sample the property surface of 'output' containing the specified
    components and phases. Model parameters are taken from 'dbf' and any
    state variables (T, P, etc.) can be specified as keyword arguments.

    Parameters
    ----------
    dbf : Database
        Thermodynamic database containing the relevant parameters.
    comps : str or sequence
        Names of components to consider in the calculation.
    phases : str or sequence
        Names of phases to consider in the calculation.
    mode : string, optional
        See 'make_callable' docstring for details.
    output : string, optional
        Model attribute to sample.
    fake_points : bool, optional (Default: False)
        If True, the first few points of the output surface will be fictitious
        points used to define an equilibrium hyperplane guaranteed to be above
        all the other points. This is used for convex hull computations.
    broadcast : bool, optional
        If True, broadcast given state variable lists against each other to create a grid.
        If False, assume state variables are given as equal-length lists.
    points : ndarray or a dict of phase names to ndarray, optional
        Columns of ndarrays must be internal degrees of freedom (site fractions), sorted.
        If this is not specified, points will be generated automatically.
    pdens : int, a dict of phase names to int, or a seq of both, optional
        Number of points to sample per degree of freedom.
        Default: 2000; Default when called from equilibrium(): 500
    model : Model, a dict of phase names to Model, or a seq of both, optional
        Model class to use for each phase.
    sampler : callable, a dict of phase names to callable, or a seq of both, optional
        Function to sample phase constitution space.
        Must have same signature as 'pycalphad.core.utils.point_sample'
    grid_points : bool, a dict of phase names to bool, or a seq of both, optional (Default: True)
        Whether to add evenly spaced points between end-members.
        The density of points is determined by 'pdens'
    parameters : dict, optional
        Maps SymPy Symbol to numbers, for overriding the values of parameters in the Database.

    Returns
    -------
    Dataset of the sampled attribute as a function of state variables

    Examples
    --------
    None yet.
    """
    # Here we check for any keyword arguments that are special, i.e.,
    # there may be keyword arguments that aren't state variables
    pdens_dict = unpack_kwarg(kwargs.pop('pdens', 2000), default_arg=2000)
    points_dict = unpack_kwarg(kwargs.pop('points', None), default_arg=None)
    callables = kwargs.pop('callables', {})
    sampler_dict = unpack_kwarg(kwargs.pop('sampler', None), default_arg=None)
    fixedgrid_dict = unpack_kwarg(kwargs.pop('grid_points', True),
                                  default_arg=True)
    parameters = parameters or dict()
    if isinstance(parameters, dict):
        parameters = OrderedDict(sorted(parameters.items(), key=str))
    if isinstance(phases, str):
        phases = [phases]
    if isinstance(comps, (str, v.Species)):
        comps = [comps]
    comps = sorted(unpack_components(dbf, comps))
    if points_dict is None and broadcast is False:
        raise ValueError(
            'The \'points\' keyword argument must be specified if broadcast=False is also given.'
        )
    nonvacant_components = [x for x in sorted(comps) if x.number_of_atoms > 0]

    all_phase_data = []
    largest_energy = 1e10

    # Consider only the active phases
    list_of_possible_phases = filter_phases(dbf, comps)
    if len(list_of_possible_phases) == 0:
        raise ConditionError(
            'There are no phases in the Database that can be active with components {0}'
            .format(comps))
    active_phases = {
        name: dbf.phases[name]
        for name in filter_phases(dbf, comps, phases)
    }
    if len(active_phases) == 0:
        raise ConditionError(
            'None of the passed phases ({0}) are active. List of possible phases: {1}.'
            .format(phases, list_of_possible_phases))

    models = instantiate_models(dbf,
                                comps,
                                list(active_phases.keys()),
                                model=kwargs.pop('model', None),
                                parameters=parameters)

    if isinstance(output, (list, tuple, set)):
        raise NotImplementedError(
            'Only one property can be specified in calculate() at a time')
    output = output if output is not None else 'GM'

    # Implicitly add 'N' state variable as a string to keyword arguments if it's not passed
    if kwargs.get('N') is None:
        kwargs['N'] = 1
    if np.any(np.array(kwargs['N']) != 1):
        raise ConditionError('N!=1 is not yet supported, got N={}'.format(
            kwargs['N']))

    # TODO: conditions dict of StateVariable instances should become part of the calculate API
    statevar_strings = [
        sv for sv in kwargs.keys() if getattr(v, sv) is not None
    ]
    # If we don't do this, sympy will get confused during substitution
    statevar_dict = dict((v.StateVariable(key), unpack_condition(value))
                         for key, value in kwargs.items()
                         if key in statevar_strings)
    # Sort after default state variable check to fix gh-116
    statevar_dict = collections.OrderedDict(
        sorted(statevar_dict.items(), key=lambda x: str(x[0])))
    phase_records = build_phase_records(dbf,
                                        comps,
                                        active_phases,
                                        statevar_dict,
                                        models=models,
                                        parameters=parameters,
                                        output=output,
                                        callables=callables,
                                        build_gradients=False,
                                        build_hessians=False,
                                        verbose=kwargs.pop('verbose', False))
    str_statevar_dict = collections.OrderedDict((str(key), unpack_condition(value)) \
                                                for (key, value) in statevar_dict.items())
    maximum_internal_dof = max(
        len(models[phase_name].site_fractions) for phase_name in active_phases)
    for phase_name, phase_obj in sorted(active_phases.items()):
        mod = models[phase_name]
        phase_record = phase_records[phase_name]
        points = points_dict[phase_name]
        variables, sublattice_dof = generate_dof(phase_obj, mod.components)
        if points is None:
            points = _sample_phase_constitution(
                phase_name, phase_obj.constituents, sublattice_dof, comps,
                tuple(variables), sampler_dict[phase_name] or point_sample,
                fixedgrid_dict[phase_name], pdens_dict[phase_name])
        points = np.atleast_2d(points)

        fp = fake_points and (phase_name == sorted(active_phases.keys())[0])
        phase_ds = _compute_phase_values(nonvacant_components,
                                         str_statevar_dict,
                                         points,
                                         phase_record,
                                         output,
                                         maximum_internal_dof,
                                         broadcast=broadcast,
                                         largest_energy=float(largest_energy),
                                         fake_points=fp)
        all_phase_data.append(phase_ds)

    # speedup for single-phase case (found by profiling)
    if len(all_phase_data) > 1:
        concatenated_coords = all_phase_data[0].coords

        data_vars = all_phase_data[0].data_vars
        concatenated_data_vars = {}
        for var in data_vars.keys():
            data_coords = data_vars[var][0]
            points_idx = data_coords.index('points')  # concatenation axis
            arrs = []
            for phase_data in all_phase_data:
                arrs.append(getattr(phase_data, var))
            concat_data = np.concatenate(arrs, axis=points_idx)
            concatenated_data_vars[var] = (data_coords, concat_data)
        final_ds = LightDataset(data_vars=concatenated_data_vars,
                                coords=concatenated_coords)
    else:
        final_ds = all_phase_data[0]
    if to_xarray:
        return final_ds.get_dataset()
    else:
        return final_ds
Example 11
def equilibrium(dbf, comps, phases, conditions, output=None, model=None,
                verbose=False, broadcast=True, calc_opts=None,
                scheduler='sync', parameters=None, solver=None, callables=None,
                **kwargs):
    """
    Calculate the equilibrium state of a system containing the specified
    components and phases, under the specified conditions.

    Parameters
    ----------
    dbf : Database
        Thermodynamic database containing the relevant parameters.
    comps : list
        Names of components to consider in the calculation.
    phases : list or dict
        Names of phases to consider in the calculation.
    conditions : dict or (list of dict)
        StateVariables and their corresponding value.
    output : str or list of str, optional
        Additional equilibrium model properties (e.g., CPM, HM, etc.) to compute.
        These must be defined as attributes in the Model class of each phase.
    model : Model, a dict of phase names to Model, or a seq of both, optional
        Model class to use for each phase.
    verbose : bool, optional
        Print details of calculations. Useful for debugging.
    broadcast : bool
        If True, broadcast conditions against each other. This will compute all combinations.
        If False, each condition should be an equal-length list (or single-valued).
        Disabling broadcasting is useful for calculating equilibrium at selected conditions,
        when those conditions don't comprise a grid.
    calc_opts : dict, optional
        Keyword arguments to pass to `calculate`, the energy/property calculation routine.
    scheduler : Dask scheduler, optional
        Job scheduler for performing the computation.
        If None, return a Dask graph of the computation instead of actually doing it.
    parameters : dict, optional
        Maps SymPy Symbol to numbers, for overriding the values of parameters in the Database.
    solver : pycalphad.core.solver.SolverBase
        Instance of a solver that is used to calculate local equilibria.
        Defaults to a pycalphad.core.solver.InteriorPointSolver.
    callables : dict, optional
        Pre-computed callable functions for equilibrium calculation.

    Returns
    -------
    Structured equilibrium calculation, or Dask graph if scheduler=None.

    Examples
    --------
    None yet.
    """
    if not broadcast:
        raise NotImplementedError('Broadcasting cannot yet be disabled')
    comps = sorted(unpack_components(dbf, comps))
    phases = unpack_phases(phases) or sorted(dbf.phases.keys())
    # remove phases that cannot be active
    list_of_possible_phases = filter_phases(dbf, comps)
    active_phases = sorted(set(list_of_possible_phases).intersection(set(phases)))
    if len(list_of_possible_phases) == 0:
        raise ConditionError('There are no phases in the Database that can be active with components {0}'.format(comps))
    if len(active_phases) == 0:
        raise ConditionError('None of the passed phases ({0}) are active. List of possible phases: {1}.'.format(phases, list_of_possible_phases))
    if isinstance(comps, (str, v.Species)):
        comps = [comps]
    if len(set(comps) - set(dbf.species)) > 0:
        raise EquilibriumError('Components not found in database: {}'
                               .format(','.join([c.name for c in (set(comps) - set(dbf.species))])))
    calc_opts = calc_opts if calc_opts is not None else dict()
    solver = solver if solver is not None else InteriorPointSolver(verbose=verbose)
    parameters = parameters if parameters is not None else dict()
    if isinstance(parameters, dict):
        parameters = OrderedDict(sorted(parameters.items(), key=str))
    models = instantiate_models(dbf, comps, active_phases, model=model, parameters=parameters)
    # Temporary solution until constraint system improves
    if conditions.get(v.N) is None:
        conditions[v.N] = 1
    if np.any(np.array(conditions[v.N]) != 1):
        raise ConditionError('N!=1 is not yet supported, got N={}'.format(conditions[v.N]))
    # Modify conditions values to be within numerical limits, e.g., X(AL)=0
    # Also wrap single-valued conditions with lists
    conds = _adjust_conditions(conditions)

    for cond in conds.keys():
        if isinstance(cond, (v.Composition, v.ChemicalPotential)) and cond.species not in comps:
            raise ConditionError('{} refers to non-existent component'.format(cond))
    state_variables = sorted(get_state_variables(models=models, conds=conds), key=str)
    str_conds = OrderedDict((str(key), value) for key, value in conds.items())
    num_calcs = np.prod([len(i) for i in str_conds.values()])
    components = [x for x in sorted(comps)]
    desired_active_pure_elements = [list(x.constituents.keys()) for x in components]
    desired_active_pure_elements = [el.upper() for constituents in desired_active_pure_elements for el in constituents]
    pure_elements = sorted(set([x for x in desired_active_pure_elements if x != 'VA']))
    if verbose:
        print('Components:', ' '.join([str(x) for x in comps]))
        print('Phases:', end=' ')
    output = output if output is not None else 'GM'
    output = output if isinstance(output, (list, tuple, set)) else [output]
    output = set(output)
    output |= {'GM'}
    output = sorted(output)
    need_hessians = any(type(c) in v.CONDITIONS_REQUIRING_HESSIANS for c in conds.keys())
    phase_records = build_phase_records(dbf, comps, active_phases, conds, models,
                                        output='GM', callables=callables,
                                        parameters=parameters, verbose=verbose,
                                        build_gradients=True, build_hessians=need_hessians)
    if verbose:
        print('[done]', end='\n')

    # 'calculate' accepts conditions through its keyword arguments
    grid_opts = calc_opts.copy()
    statevar_strings = [str(x) for x in state_variables]
    grid_opts.update({key: value for key, value in str_conds.items() if key in statevar_strings})
    if 'pdens' not in grid_opts:
        grid_opts['pdens'] = 500
    grid = delayed(calculate, pure=False)(dbf, comps, active_phases,
                                          model=models, fake_points=True,
                                          callables=callables, output='GM',
                                          parameters=parameters, **grid_opts)
    coord_dict = str_conds.copy()
    coord_dict['vertex'] = np.arange(
        len(pure_elements) + 1)  # +1 is to accommodate the degenerate degree of freedom at the invariant reactions
    coord_dict['component'] = pure_elements
    grid_shape = tuple(len(x) for x in conds.values()) + (len(pure_elements)+1,)
    properties = delayed(starting_point, pure=False)(conds, state_variables, phase_records, grid)
    conditions_per_chunk_per_axis = 2
    if num_calcs > 1:
        # Generate slices of 'properties'
        slices = []
        for val in grid_shape[:-1]:
            idx_arr = list(range(val))
            num_chunks = int(np.floor(val/conditions_per_chunk_per_axis))
            if num_chunks > 0:
                cond_slices = [x for x in np.array_split(np.asarray(idx_arr), num_chunks) if len(x) > 0]
            else:
                cond_slices = [idx_arr]
            slices.append(cond_slices)
        chunk_dims = [len(slc) for slc in slices]
        chunk_grid = np.array(np.unravel_index(np.arange(np.prod(chunk_dims)), chunk_dims)).T
        res = []
        for chunk in chunk_grid:
            prop_slice = properties[OrderedDict(list(zip(str_conds.keys(),
                                                         [np.atleast_1d(sl)[ch] for ch, sl in zip(chunk, slices)])))]
            job = delayed(_solve_eq_at_conditions, pure=False)(comps, prop_slice, phase_records, grid,
                                                               list(str_conds.keys()), state_variables, verbose, solver=solver)
            res.append(job)
        properties = delayed(_merge_property_slices, pure=False)(properties, chunk_grid, slices, list(str_conds.keys()), res)
    else:
        # Single-process job; don't create child processes
        properties = delayed(_solve_eq_at_conditions, pure=False)(comps, properties, phase_records, grid,
                                                                  list(str_conds.keys()), state_variables, verbose, solver=solver)

    # Compute equilibrium values of any additional user-specified properties
    # We already computed these properties so don't recompute them
    output = sorted(set(output) - {'GM', 'MU'})
    for out in output:
        if (out is None) or (len(out) == 0):
            continue
        # TODO: How do we know if a specified property should be per_phase or not?
        # For now, we make a best guess
        if (out == 'degree_of_ordering') or (out == 'DOO'):
            per_phase = True
        else:
            per_phase = False
        eqcal = delayed(_eqcalculate, pure=False)(dbf, comps, active_phases, conditions, out,
                                                  data=properties, per_phase=per_phase,
                                                  callables=callables,
                                                  parameters=parameters,
                                                  model=models, **calc_opts)
        properties = delayed(properties.merge, pure=False)(eqcal, compat='equals')
    if scheduler is not None:
        properties = dask.compute(properties, scheduler=scheduler)[0]
    properties.attrs['created'] = datetime.utcnow().isoformat()
    if len(kwargs) > 0:
        warnings.warn('The following equilibrium keyword arguments were passed, but unused:\n{}'.format(kwargs))
    return properties
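
A brief usage sketch for equilibrium (not part of the original source); the TDB file name is an assumption. The conditions mirror those used in the tests above.

from pycalphad import Database, equilibrium, variables as v

dbf = Database('Al-Fe.tdb')  # assumed database file
conds = {v.T: 1400, v.P: 101325, v.N: 1, v.X('AL'): 0.55}
eq = equilibrium(dbf, ['AL', 'FE', 'VA'], ['LIQUID', 'FCC_A1'], conds)
print(float(eq.GM.values.squeeze()))
print(eq.Phase.values.squeeze())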
Example 12
def equilibrium(dbf,
                comps,
                phases,
                conditions,
                output=None,
                model=None,
                verbose=False,
                broadcast=True,
                calc_opts=None,
                to_xarray=True,
                scheduler='sync',
                parameters=None,
                solver=None,
                callables=None,
                **kwargs):
    """
    Calculate the equilibrium state of a system containing the specified
    components and phases, under the specified conditions.

    Parameters
    ----------
    dbf : Database
        Thermodynamic database containing the relevant parameters.
    comps : list
        Names of components to consider in the calculation.
    phases : list or dict
        Names of phases to consider in the calculation.
    conditions : dict or (list of dict)
        StateVariables and their corresponding value.
    output : str or list of str, optional
        Additional equilibrium model properties (e.g., CPM, HM, etc.) to compute.
        These must be defined as attributes in the Model class of each phase.
    model : Model, a dict of phase names to Model, or a seq of both, optional
        Model class to use for each phase.
    verbose : bool, optional
        Print details of calculations. Useful for debugging.
    broadcast : bool
        If True, broadcast conditions against each other. This will compute all combinations.
        If False, each condition should be an equal-length list (or single-valued).
        Disabling broadcasting is useful for calculating equilibrium at selected conditions,
        when those conditions don't comprise a grid.
    calc_opts : dict, optional
        Keyword arguments to pass to `calculate`, the energy/property calculation routine.
    to_xarray : bool
        Whether to return an xarray Dataset (True, default) or an EquilibriumResult.
    scheduler : Dask scheduler, optional
        Job scheduler for performing the computation.
        If None, return a Dask graph of the computation instead of actually doing it.
    parameters : dict, optional
        Maps SymPy Symbol to numbers, for overriding the values of parameters in the Database.
    solver : pycalphad.core.solver.SolverBase
        Instance of a solver that is used to calculate local equilibria.
        Defaults to a pycalphad.core.solver.InteriorPointSolver.
    callables : dict, optional
        Pre-computed callable functions for equilibrium calculation.

    Returns
    -------
    Structured equilibrium calculation, or Dask graph if scheduler=None.

    Examples
    --------
    None yet.
    """
    if not broadcast:
        raise NotImplementedError('Broadcasting cannot yet be disabled')
    comps = sorted(unpack_components(dbf, comps))
    phases = unpack_phases(phases) or sorted(dbf.phases.keys())
    list_of_possible_phases = filter_phases(dbf, comps)
    if len(list_of_possible_phases) == 0:
        raise ConditionError(
            'There are no phases in the Database that can be active with components {0}'
            .format(comps))
    active_phases = {
        name: dbf.phases[name]
        for name in filter_phases(dbf, comps, phases)
    }
    if len(active_phases) == 0:
        raise ConditionError(
            'None of the passed phases ({0}) are active. List of possible phases: {1}.'
            .format(phases, list_of_possible_phases))
    if isinstance(comps, (str, v.Species)):
        comps = [comps]
    if len(set(comps) - set(dbf.species)) > 0:
        raise EquilibriumError('Components not found in database: {}'.format(
            ','.join([c.name for c in (set(comps) - set(dbf.species))])))
    calc_opts = calc_opts if calc_opts is not None else dict()
    solver = solver if solver is not None else InteriorPointSolver(
        verbose=verbose)
    parameters = parameters if parameters is not None else dict()
    if isinstance(parameters, dict):
        parameters = OrderedDict(sorted(parameters.items(), key=str))
    models = instantiate_models(dbf,
                                comps,
                                active_phases,
                                model=model,
                                parameters=parameters)
    # Temporary solution until constraint system improves
    if conditions.get(v.N) is None:
        conditions[v.N] = 1
    if np.any(np.array(conditions[v.N]) != 1):
        raise ConditionError('N!=1 is not yet supported, got N={}'.format(
            conditions[v.N]))
    # Modify conditions values to be within numerical limits, e.g., X(AL)=0
    # Also wrap single-valued conditions with lists
    conds = _adjust_conditions(conditions)

    for cond in conds.keys():
        if isinstance(cond,
                      (v.Composition,
                       v.ChemicalPotential)) and cond.species not in comps:
            raise ConditionError(
                '{} refers to non-existent component'.format(cond))
    state_variables = sorted(get_state_variables(models=models, conds=conds),
                             key=str)
    str_conds = OrderedDict((str(key), value) for key, value in conds.items())
    components = sorted(comps)
    desired_active_pure_elements = [
        list(x.constituents.keys()) for x in components
    ]
    desired_active_pure_elements = [
        el.upper() for constituents in desired_active_pure_elements
        for el in constituents
    ]
    pure_elements = sorted(
        set([x for x in desired_active_pure_elements if x != 'VA']))
    if verbose:
        print('Components:', ' '.join([str(x) for x in comps]))
        print('Phases:', end=' ')
    output = output if output is not None else 'GM'
    output = output if isinstance(output, (list, tuple, set)) else [output]
    output = set(output)
    output |= {'GM'}
    output = sorted(output)
    phase_records = build_phase_records(dbf,
                                        comps,
                                        active_phases,
                                        conds,
                                        models,
                                        output='GM',
                                        callables=callables,
                                        parameters=parameters,
                                        verbose=verbose,
                                        build_gradients=True,
                                        build_hessians=True)
    if verbose:
        print('[done]', end='\n')

    # 'calculate' accepts conditions through its keyword arguments
    grid_opts = calc_opts.copy()
    statevar_strings = [str(x) for x in state_variables]
    grid_opts.update({
        key: value
        for key, value in str_conds.items() if key in statevar_strings
    })
    if 'pdens' not in grid_opts:
        grid_opts['pdens'] = 500
    grid = calculate(dbf,
                     comps,
                     active_phases,
                     model=models,
                     fake_points=True,
                     callables=callables,
                     output='GM',
                     parameters=parameters,
                     to_xarray=False,
                     **grid_opts)
    coord_dict = str_conds.copy()
    coord_dict['vertex'] = np.arange(
        len(pure_elements) + 1
    )  # +1 is to accommodate the degenerate degree of freedom at the invariant reactions
    coord_dict['component'] = pure_elements
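    # Use the sampled grid to construct a starting point, then refine it at each
    # set of conditions with the local equilibrium solver.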
    properties = starting_point(conds, state_variables, phase_records, grid)
    properties = _solve_eq_at_conditions(comps,
                                         properties,
                                         phase_records,
                                         grid,
                                         list(str_conds.keys()),
                                         state_variables,
                                         verbose,
                                         solver=solver)

    # Compute equilibrium values of any additional user-specified properties
    # We already computed these properties so don't recompute them
    output = sorted(set(output) - {'GM', 'MU'})
    for out in output:
        if (out is None) or (len(out) == 0):
            continue
        # TODO: How do we know if a specified property should be per_phase or not?
        # For now, we make a best guess
        if (out == 'degree_of_ordering') or (out == 'DOO'):
            per_phase = True
        else:
            per_phase = False
        eqcal = _eqcalculate(dbf,
                             comps,
                             active_phases,
                             conditions,
                             out,
                             data=properties,
                             per_phase=per_phase,
                             model=models,
                             callables=callables,
                             parameters=parameters,
                             **calc_opts)
        properties = properties.merge(eqcal, inplace=True, compat='equals')
    if to_xarray:
        properties = properties.get_dataset()
    properties.attrs['created'] = datetime.utcnow().isoformat()
    if len(kwargs) > 0:
        warnings.warn(
            'The following equilibrium keyword arguments were passed, but unused:\n{}'
            .format(kwargs))
    return properties
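
A minimal usage sketch of equilibrium (not part of the listing above; the TDB path 'alfe.tdb' and the condition values are hypothetical):

from pycalphad import Database, equilibrium, variables as v

# Hypothetical database file and conditions, for illustration only.
my_dbf = Database('alfe.tdb')
my_comps = ['AL', 'FE', 'VA']
my_phases = ['LIQUID', 'FCC_A1']
my_conds = {v.N: 1, v.P: 101325, v.T: (1000, 1600, 50), v.X('AL'): (0, 1, 0.02)}

eq = equilibrium(my_dbf, my_comps, my_phases, my_conds)
print(eq.GM)     # equilibrium Gibbs energy over the condition grid
print(eq.Phase)  # stable phases at each grid point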
Example n. 13
0
def calculate_driving_force(dbf,
                            data_comps,
                            phases,
                            current_statevars,
                            ph_cond_dict,
                            phase_models,
                            phase_dict,
                            parameters,
                            callables,
                            tol=0.001,
                            max_it=50):
    """
    Calculates driving force for a single data point.

    Parameters
    ----------
    dbf : pycalphad.Database
        Database to consider
    data_comps : list
        List of active component names
    phases : list
        List of phases to consider
    current_statevars : dict
        Dictionary of state variables, e.g. v.P and v.T, no compositions.
    ph_cond_dict : dict
        Dictionary mapping phases to the conditions at which they occurred in experiment.
    phase_models : dict
        Phase models to pass to pycalphad calculations
    phase_dict : dict
        Per-phase dictionary used to cache phase records, sample points,
        compositions, and energies between calls
    parameters : dict
        Dictionary of symbols that will be overridden in pycalphad.equilibrium
    callables : dict
        Callables to pass to pycalphad
    tol : float
        Tolerance allowed for the optimization over hyperplanes.
    max_it : int
        Maximum number of iterations allowed for the optimization over hyperplanes.

    Returns
    -------
    float
        Driving force for this data point (0 if the solver fails to converge).

    Notes
    -----
    Calculates the driving force by optimizing over chemical potential hyperplanes.
    This allows the driving force to be calculated even when both tie points are missing.
    """
    # TODO Refactor absurd unpacking which represents a significant overhead.
    species = list(map(v.Species, data_comps))
    conditions = current_statevars
    if conditions.get(v.N) is None:
        conditions[v.N] = 1.0
    if np.any(np.array(conditions[v.N]) != 1):
        raise ConditionError('N!=1 is not yet supported, got N={}'.format(
            conditions[v.N]))
    conds = conditions
    str_conds = OrderedDict([(str(key), conds[key])
                             for key in sorted(conds.keys(), key=str)])
    models = instantiate_models(dbf,
                                data_comps,
                                phases,
                                model=phase_models,
                                parameters=parameters)
    prxs = build_phase_records(dbf,
                               species,
                               phases,
                               conds,
                               models,
                               build_gradients=True,
                               build_hessians=True,
                               callables=callables,
                               parameters=parameters)
    # Collect data information in phase_dict.
    for phase in phases:
        phase_dict[phase]['data'] = False
    for ph, cond in ph_cond_dict:
        has_nones = False
        ph_conds = cond[0]
        phase_dict[ph]['data'] = True
        for key in ph_conds:
            if ph_conds[key] is None:
                has_nones = True
                phase_dict[ph]['phase_record'] = None
                phase_dict[ph]['str_conds'] = None
        if not has_nones:
            ph_conds.update(conditions)
            phase_records = build_phase_records(dbf,
                                                species, [ph],
                                                ph_conds,
                                                models,
                                                build_gradients=True,
                                                build_hessians=True,
                                                callables=callables,
                                                parameters=parameters)
            phase_dict[ph]['phase_record'] = phase_records[ph]
            phase_dict[ph]['str_conds'] = OrderedDict([
                (str(key), ph_conds[key])
                for key in sorted(ph_conds.keys(), key=str)
            ])
            phase_dict[ph]['min_energy'] = None
    # Collect sampling and equilibrium information in phase_dict.
    for phase in phases:
        # If sample points have not yet been calculated for this phase, calculate them.
        if 'sample_points' not in phase_dict[phase]:
            phase_obj = dbf.phases[phase]
            components = models[phase].components
            variables, sublattice_dof = generate_dof(phase_obj, components)
            sample_points = _sample_phase_constitution(
                phase, phase_obj.constituents, sublattice_dof, data_comps,
                tuple(variables), point_sample, True, 2000)
            phase_dict[phase]['sample_points'] = sample_points
        # If composition values have not yet been calculated for this phase, calculate them.
        if 'composition_values' not in phase_dict[phase]:
            sample_points = phase_dict[phase]['sample_points']
            composition_values = np.zeros(
                (sample_points.shape[0],
                 len([sp for sp in species if str(sp) != 'VA'])))
            temp_comp_set = CompositionSet(prxs[phase])
            current_state_variables = np.array(
                [str_conds[key] for key in sorted(str_conds.keys(), key=str)])
            for i in range(sample_points.shape[0]):
                temp_comp_set.py_update(sample_points[i, :], np.array([1.0]),
                                        current_state_variables, False)
                composition_values[i, :] = temp_comp_set.X
            phase_dict[phase]['composition_values'] = composition_values
        energies = calculate(dbf,
                             data_comps, [phase],
                             points=phase_dict[phase]['sample_points'],
                             to_xarray=False,
                             **str_conds)
        phase_dict[phase]['energy_values'] = np.array(energies['GM'][0][0][0])
    hyperplane = generate_random_hyperplane(species)
    result = calculate_driving_force_at_chem_potential(dbf,
                                                       hyperplane,
                                                       species,
                                                       phase_dict,
                                                       prxs,
                                                       str_conds,
                                                       approx=True)
    # Ignore the entire data point if the point solver fails to converge.
    if result is None:
        return 0
    # Optimize over the hyperplane.
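    # Each call to calculate_driving_force_at_chem_potential proposes a new
    # hyperplane; accept it when the driving force decreases, otherwise backtrack
    # toward the previous hyperplane by repeatedly halving the step size.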
    it = 0
    current_driving_force = result['driving_force']
    new_plane = result['new_plane']
    last_plane = new_plane
    while np.linalg.norm(hyperplane - last_plane) > tol and it < max_it:
        it += 1
        last_plane = hyperplane
        result = calculate_driving_force_at_chem_potential(dbf,
                                                           new_plane,
                                                           species,
                                                           phase_dict,
                                                           prxs,
                                                           str_conds,
                                                           approx=True)
        # If step results in objective decrease, accept the step.
        if result['driving_force'] < current_driving_force:
            current_driving_force = result['driving_force']
            hyperplane = new_plane
            new_plane = result['new_plane']
        else:
            step = 0.5
            temp_hyperplane = new_plane
            while (result['driving_force'] > current_driving_force
                   and np.linalg.norm(hyperplane - temp_hyperplane) > tol):
                temp_hyperplane = (1.0 - step) * hyperplane + step * new_plane
                result = calculate_driving_force_at_chem_potential(
                    dbf,
                    temp_hyperplane,
                    species,
                    phase_dict,
                    prxs,
                    str_conds,
                    approx=True)
                step /= 2
            hyperplane = temp_hyperplane
            result = calculate_driving_force_at_chem_potential(dbf,
                                                               hyperplane,
                                                               species,
                                                               phase_dict,
                                                               prxs,
                                                               str_conds,
                                                               approx=True)
            new_plane = result['new_plane']
            current_driving_force = result['driving_force']
    final_result = calculate_driving_force_at_chem_potential(dbf,
                                                             hyperplane,
                                                             species,
                                                             phase_dict,
                                                             prxs,
                                                             str_conds,
                                                             approx=True)
    final_driving_force = final_result['driving_force']
    print(it, final_driving_force, hyperplane)
    return final_driving_force
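
The loop above is essentially a fixed-point iteration over candidate hyperplanes with a backtracking line search. A stripped-down sketch of that pattern, assuming a caller-supplied evaluate callable that maps a hyperplane to (driving_force, proposed_plane) — the names here are illustrative, not an ESPEI or pycalphad API:

import numpy as np

def optimize_over_hyperplane(evaluate, plane, tol=1e-3, max_it=50):
    # Sketch only: 'evaluate' maps a hyperplane to (driving_force, proposed_plane).
    current_df, new_plane = evaluate(plane)
    for _ in range(max_it):
        df, proposed = evaluate(new_plane)
        if df < current_df:
            # The proposed plane lowered the driving force: accept it.
            plane, current_df, new_plane = new_plane, df, proposed
        else:
            # Backtrack: halve the step toward new_plane until the driving force
            # decreases or the step becomes smaller than the tolerance.
            step, trial = 0.5, new_plane
            while df > current_df and np.linalg.norm(plane - trial) > tol:
                trial = (1.0 - step) * plane + step * new_plane
                df, proposed = evaluate(trial)
                step /= 2
            plane, current_df, new_plane = trial, df, proposed
        if np.linalg.norm(plane - new_plane) <= tol:
            break
    return current_df, plane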
Example n. 14
0
def get_zpf_data(dbf: Database, comps: Sequence[str], phases: Sequence[str],
                 datasets: PickleableTinyDB, parameters: Dict[str, float]):
    """
    Return the ZPF data used in the calculation of ZPF error

    Parameters
    ----------
    dbf : pycalphad.Database
        Thermodynamic database containing the relevant parameters
    comps : list
        List of active component names
    phases : list
        List of phases to consider
    datasets : espei.utils.PickleableTinyDB
        Datasets that contain phase equilibria (ZPF) data
    parameters : dict
        Dictionary mapping symbols to optimize to their initial values

    Returns
    -------
    list
        List of data dictionaries with keys ``weight``, ``data_comps``,
        ``phase_regions`` and ``dataset_reference``. ``data_comps`` are the
        components for the data in question. ``phase_regions`` are the ZPF
        phases, state variables and compositions.
    """
    desired_data = datasets.search(
        (tinydb.where('output') == 'ZPF')
        & (tinydb.where('components').test(lambda x: set(x).issubset(comps)))
        & (tinydb.where('phases').test(
            lambda x: len(set(phases).intersection(x)) > 0)))

    zpf_data = []  # 1:1 correspondence with each dataset
    for data in desired_data:
        data_comps = list(set(data['components']).union({'VA'}))
        species = sorted(unpack_components(dbf, data_comps), key=str)
        data_phases = filter_phases(dbf, species, candidate_phases=phases)
        models = instantiate_models(dbf,
                                    species,
                                    data_phases,
                                    parameters=parameters)
        all_regions = data['values']
        conditions = data['conditions']
        phase_regions = []
        # Each phase_region is one set of phases in equilibrium (on a tie-line),
        # e.g. [["ALPHA", ["B"], [0.25]], ["BETA", ["B"], [0.5]]]
        for idx, phase_region in enumerate(all_regions):
            # We need to construct a PhaseRegion by matching up phases/compositions to the conditions
            if len(phase_region) < 2:
                # Skip single-phase regions for fitting purposes
                continue
            # Extract the conditions for entire phase region
            region_potential_conds = extract_conditions(conditions, idx)
            region_potential_conds[v.N] = region_potential_conds.get(
                v.N) or 1.0  # Add v.N condition, if missing
            # Extract all the phases and compositions from the tie-line points
            region_phases, region_comp_conds, phase_flags = extract_phases_comps(
                phase_region)
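            # Build one phase_records dict per tie-line vertex, since each vertex
            # carries its own composition conditions.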
            region_phase_records = [
                build_phase_records(dbf,
                                    species,
                                    data_phases, {
                                        **region_potential_conds,
                                        **comp_conds
                                    },
                                    models,
                                    parameters=parameters,
                                    build_gradients=True,
                                    build_hessians=True)
                for comp_conds in region_comp_conds
            ]
            phase_regions.append(
                PhaseRegion(region_phases, region_potential_conds,
                            region_comp_conds, phase_flags, dbf, species,
                            data_phases, models, region_phase_records))

        data_dict = {
            'weight': data.get('weight', 1.0),
            'data_comps': data_comps,
            'phase_regions': phase_regions,
            'dataset_reference': data['reference']
        }
        zpf_data.append(data_dict)
    return zpf_data
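
For reference, a hypothetical ZPF dataset entry of the kind this function searches for; the field names follow the ESPEI dataset layout sketched in the comments above, and all values are made up:

# Hypothetical ZPF dataset entry (illustrative values only).
zpf_dataset = {
    "components": ["AL", "FE"],
    "phases": ["FCC_A1", "LIQUID"],
    "conditions": {"P": 101325, "T": [1400, 1450]},
    "output": "ZPF",
    # One entry per temperature; each tie-line vertex is [phase, [components], [compositions]].
    "values": [
        [["FCC_A1", ["AL"], [0.25]], ["LIQUID", ["AL"], [0.38]]],
        [["FCC_A1", ["AL"], [0.27]], ["LIQUID", ["AL"], [0.41]]],
    ],
    "reference": "hypothetical-reference",
    "weight": 1.0,
}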