Example #1
0
def _generate_fake_points(components, statevar_dict, energy_limit, output,
                          maximum_internal_dof):
    """
    Generate points for a fictitious hyperplane used as a starting point for energy minimization.

    Parameters
    ----------
    components : list
        Names of components to consider in the calculation.
    statevar_dict : OrderedDict {str -> float or sequence}
        Mapping of state variables to desired values. This will broadcast if necessary.
    energy_limit : float
        Energy level for the fake hyperplane; padded slightly so it sits
        strictly above every real energy value.
    output : string
        Desired name of the output result in the Dataset.
    maximum_internal_dof : int
        Largest number of internal degrees of freedom of any phase.

    Returns
    -------
    xray.Dataset with one fictitious '_FAKE_' point per pure component.
    """
    coordinate_dict = {'component': components}
    coordinate_dict.update(
        {str(key): value
         for key, value in statevar_dict.items()})
    # Sign-aware padding: shrink negative limits and grow positive ones so
    # the fake energy is always slightly beyond the true limit.
    largest_energy = float(energy_limit)
    if largest_energy < 0:
        largest_energy *= 0.99
    else:
        largest_energy *= 1.01
    output_columns = [str(x) for x in statevar_dict.keys()] + ['points']
    statevar_shape = tuple(
        len(np.atleast_1d(x)) for x in statevar_dict.values())
    # The internal dof for the fake points are all NaNs
    expanded_points = np.full(
        statevar_shape + (len(components), maximum_internal_dof), np.nan)
    data_arrays = {
        # Identity matrix: fake point i is pure in component i
        'X':
        (output_columns + ['component'],
         broadcast_to(np.eye(len(components)),
                      statevar_shape + (len(components), len(components)))),
        'Y': (output_columns + ['internal_dof'], expanded_points),
        # Use a unicode dtype ('U6', not bytes 'S6') so 'Phase' stays
        # concat-compatible with the 'U'-dtype phase names written by
        # _compute_phase_values.
        'Phase': (output_columns,
                  np.full(statevar_shape + (len(components), ),
                          '_FAKE_',
                          dtype='U6')),
        output: (output_columns,
                 np.full(statevar_shape + (len(components), ), largest_energy))
    }
    return xray.Dataset(data_arrays, coords=coordinate_dict)
Example #2
0
def _generate_fake_points(components, statevar_dict, energy_limit, output, maximum_internal_dof):
    """
    Generate points for a fictitious hyperplane used as a starting point for energy minimization.

    Parameters
    ----------
    components : list
        Names of components to consider in the calculation.
    statevar_dict : OrderedDict {str -> float or sequence}
        Mapping of state variables to desired values. This will broadcast if necessary.
    energy_limit : float
        Energy level for the fake hyperplane; padded slightly so it sits
        strictly above every real energy value.
    output : string
        Desired name of the output result in the Dataset.
    maximum_internal_dof : int
        Largest number of internal degrees of freedom of any phase.

    Returns
    -------
    xray.Dataset with one fictitious '_FAKE_' point per pure component.
    """
    coordinate_dict = {"component": components}
    coordinate_dict.update({str(key): value for key, value in statevar_dict.items()})
    # Sign-aware padding: shrink negative limits and grow positive ones so
    # the fake energy is always slightly beyond the true limit.
    largest_energy = float(energy_limit)
    if largest_energy < 0:
        largest_energy *= 0.99
    else:
        largest_energy *= 1.01
    output_columns = [str(x) for x in statevar_dict.keys()] + ["points"]
    statevar_shape = tuple(len(np.atleast_1d(x)) for x in statevar_dict.values())
    # The internal dof for the fake points are all NaNs
    expanded_points = np.full(statevar_shape + (len(components), maximum_internal_dof), np.nan)
    data_arrays = {
        # Identity matrix: fake point i is pure in component i
        "X": (
            output_columns + ["component"],
            broadcast_to(np.eye(len(components)), statevar_shape + (len(components), len(components))),
        ),
        "Y": (output_columns + ["internal_dof"], expanded_points),
        # Unicode dtype ("U6", not bytes "S6") keeps "Phase" concat-compatible
        # with the "U"-dtype phase names written by _compute_phase_values.
        "Phase": (output_columns, np.full(statevar_shape + (len(components),), "_FAKE_", dtype="U6")),
        output: (output_columns, np.full(statevar_shape + (len(components),), largest_energy)),
    }
    return xray.Dataset(data_arrays, coords=coordinate_dict)
Example #3
0
def _compute_phase_values(phase_obj, components, variables, statevar_dict,
                          points, func, output, maximum_internal_dof):
    """
    Calculate output values for a particular phase.

    Parameters
    ----------
    phase_obj : Phase
        Phase object from a thermodynamic database.
    components : list
        Names of components to consider in the calculation.
    variables : list
        Names of variables in the phase's internal degrees of freedom.
    statevar_dict : OrderedDict {str -> float or sequence}
        Mapping of state variables to desired values. This will broadcast if necessary.
    points : ndarray
        Inputs to 'func', except state variables. Columns should be in 'variables' order.
    func : callable
        Function of state variables and 'variables'.
        See 'make_callable' docstring for details.
    output : string
        Desired name of the output result in the Dataset.
    maximum_internal_dof : int
        Largest number of internal degrees of freedom of any phase. This is used
        to guarantee different phase's Datasets can be concatenated.

    Returns
    -------
    xray.Dataset of the output attribute as a function of state variables

    Examples
    --------
    None yet.
    """
    # Broadcast compositions and state variables along orthogonal axes
    # This lets us eliminate an expensive Python loop
    statevar_grid = np.meshgrid(*itertools.chain(statevar_dict.values(),
                                                 [np.empty(points.shape[-2])]),
                                sparse=True,
                                indexing='ij')[:-1]
    points = broadcast_to(
        points,
        tuple(len(np.atleast_1d(x))
              for x in statevar_dict.values()) + points.shape[-2:])
    phase_output = func(
        *itertools.chain(statevar_grid, np.rollaxis(points, -1, start=0)))

    # Map the internal degrees of freedom to global coordinates
    # Normalize site ratios by the sum of site ratios times a factor
    # related to the site fraction of vacancies
    site_ratio_normalization = np.zeros(points.shape[:-1])
    for idx, sublattice in enumerate(phase_obj.constituents):
        vacancy_column = np.ones(points.shape[:-1])
        if 'VA' in set(sublattice):
            var_idx = variables.index(v.SiteFraction(phase_obj.name, idx,
                                                     'VA'))
            vacancy_column -= points[..., :, var_idx]
        site_ratio_normalization += phase_obj.sublattices[idx] * vacancy_column

    # Build the (dof x component) weight matrix once and compute all global
    # compositions with a single matrix product, instead of recomputing the
    # weight vector and doing one dot product per component.
    comp_matrix = np.array(
        [[float(vxx.species == comp) *
          phase_obj.sublattices[vxx.sublattice_index]
          for comp in components] for vxx in variables])
    phase_compositions = np.dot(points, comp_matrix) / \
        site_ratio_normalization[..., np.newaxis]

    coordinate_dict = {'component': components}
    coordinate_dict.update(
        {key: np.atleast_1d(value)
         for key, value in statevar_dict.items()})
    output_columns = [str(x) for x in statevar_dict.keys()] + ['points']
    # Resize 'points' so it has the same number of columns as the maximum
    # number of internal degrees of freedom of any phase in the calculation.
    # We do this so that everything is aligned for concat.
    # Waste of memory? Yes, but the alternatives are unclear.
    expanded_points = np.full(points.shape[:-1] + (maximum_internal_dof, ),
                              np.nan)
    expanded_points[..., :points.shape[-1]] = points
    data_arrays = {
        'X': (output_columns + ['component'], phase_compositions),
        'Phase': (output_columns,
                  np.full(points.shape[:-1],
                          phase_obj.name,
                          dtype='U' + str(len(phase_obj.name)))),
        'Y': (output_columns + ['internal_dof'], expanded_points),
        # Any leading axes of 'func' output beyond the state-variable/points
        # grid get generic 'dim_N' names.
        output: ([
            'dim_' + str(i)
            for i in range(len(phase_output.shape) - len(output_columns))
        ] + output_columns, phase_output)
    }

    return xray.Dataset(data_arrays, coords=coordinate_dict)
Example #4
0
def _compute_phase_values(phase_obj, components, variables, statevar_dict, points, func, output, maximum_internal_dof):
    """
    Calculate output values for a particular phase.

    Parameters
    ----------
    phase_obj : Phase
        Phase object from a thermodynamic database.
    components : list
        Names of components to consider in the calculation.
    variables : list
        Names of variables in the phase's internal degrees of freedom.
    statevar_dict : OrderedDict {str -> float or sequence}
        Mapping of state variables to desired values. This will broadcast if necessary.
    points : ndarray
        Inputs to 'func', except state variables. Columns should be in 'variables' order.
    func : callable
        Function of state variables and 'variables'.
        See 'make_callable' docstring for details.
    output : string
        Desired name of the output result in the Dataset.
    maximum_internal_dof : int
        Largest number of internal degrees of freedom of any phase. This is used
        to guarantee different phase's Datasets can be concatenated.

    Returns
    -------
    xray.Dataset of the output attribute as a function of state variables

    Examples
    --------
    None yet.
    """
    # A sparse meshgrid broadcasts the state variables against the points
    # axis without an explicit Python loop; the dummy trailing axis exists
    # only to shape the grid and is dropped afterwards.
    statevar_values = list(statevar_dict.values())
    dummy_axis = np.empty(points.shape[-2])
    grids = np.meshgrid(*(statevar_values + [dummy_axis]), sparse=True, indexing="ij")
    statevar_grid = grids[:-1]
    statevar_shape = tuple(len(np.atleast_1d(value)) for value in statevar_values)
    points = broadcast_to(points, statevar_shape + points.shape[-2:])
    dof_columns = np.rollaxis(points, -1, start=0)
    phase_output = func(*itertools.chain(statevar_grid, dof_columns))

    # Normalization factor for mapping internal dof to global composition:
    # sum of site ratios, with vacancy-bearing sublattices weighted by
    # (1 - vacancy site fraction).
    site_ratio_normalization = np.zeros(points.shape[:-1])
    for sl_index, sl_constituents in enumerate(phase_obj.constituents):
        occupancy_factor = np.ones(points.shape[:-1])
        if "VA" in set(sl_constituents):
            va_idx = variables.index(v.SiteFraction(phase_obj.name, sl_index, "VA"))
            occupancy_factor = occupancy_factor - points[..., :, va_idx]
        site_ratio_normalization = site_ratio_normalization + phase_obj.sublattices[sl_index] * occupancy_factor

    # One weighted dot product per component maps site fractions to moles.
    phase_compositions = np.empty(points.shape[:-1] + (len(components),))
    for col, comp in enumerate(components):
        weights = []
        for vxx in variables:
            weights.append(float(vxx.species == comp) * phase_obj.sublattices[vxx.sublattice_index])
        phase_compositions[..., :, col] = np.divide(np.dot(points[..., :, :], weights), site_ratio_normalization)

    coordinate_dict = {"component": components}
    for key, value in statevar_dict.items():
        coordinate_dict[key] = np.atleast_1d(value)
    output_columns = [str(name) for name in statevar_dict.keys()] + ["points"]
    # Pad the internal-dof axis with NaN up to the calculation-wide maximum
    # so Datasets from different phases align for concatenation.
    expanded_points = np.full(points.shape[:-1] + (maximum_internal_dof,), np.nan)
    expanded_points[..., : points.shape[-1]] = points
    # Extra leading axes of the 'func' output get generic 'dim_N' names.
    extra_dims = ["dim_" + str(i) for i in range(len(phase_output.shape) - len(output_columns))]
    phase_name_dtype = "U" + str(len(phase_obj.name))
    data_arrays = {
        "X": (output_columns + ["component"], phase_compositions),
        "Phase": (output_columns, np.full(points.shape[:-1], phase_obj.name, dtype=phase_name_dtype)),
        "Y": (output_columns + ["internal_dof"], expanded_points),
        output: (extra_dims + output_columns, phase_output),
    }

    return xray.Dataset(data_arrays, coords=coordinate_dict)