Example #1
# `hash_array`, `get_forward_neighbor` and `get_backward_neighbor` are
# pyapprox helper utilities assumed to be importable in this scope.
import numpy as np


def expand_basis(indices):
    # Find all admissible forward neighbors of a downward-closed index set
    nvars, nindices = indices.shape
    indices_set = set()
    for ii in range(nindices):
        indices_set.add(hash_array(indices[:, ii]))

    new_indices = []
    for ii in range(nindices):
        index = indices[:, ii]
        # np.nonzero returns a tuple of arrays; take the array of active dimensions
        active_vars = np.nonzero(index)[0]
        for dd in range(nvars):
            forward_index = get_forward_neighbor(index, dd)
            key = hash_array(forward_index)
            if key not in indices_set:
                admissible = True
                for kk in active_vars:
                    backward_index = get_backward_neighbor(forward_index, kk)
                    if hash_array(backward_index) not in indices_set:
                        admissible = False
                        break
                if admissible:
                    indices_set.add(key)
                    new_indices.append(forward_index)
    return np.asarray(new_indices).T
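
A minimal, self-contained sketch of how expand_basis behaves. The three helpers below are plausible stand-ins for the pyapprox utilities of the same names, written here only so the example runs on its own; they are not the library's actual implementations.

import numpy as np

def hash_array(array):
    # Stand-in: any hashable key that uniquely identifies an index column
    return tuple(array.tolist())

def get_forward_neighbor(index, dd):
    # Stand-in: increment dimension dd of the multi-index by one
    neighbor = index.copy()
    neighbor[dd] += 1
    return neighbor

def get_backward_neighbor(index, dd):
    # Stand-in: decrement dimension dd of the multi-index by one
    neighbor = index.copy()
    neighbor[dd] -= 1
    return neighbor

# Degree-1 total-degree set in two variables: (0,0), (1,0), (0,1)
indices = np.array([[0, 1, 0],
                    [0, 0, 1]])
print(expand_basis(indices))
# The admissible forward neighbors are (2,0), (1,1) and (0,2):
# [[2 1 0]
#  [0 1 2]]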
Example #2
def cross_validate_pce_degree(pce,
                              train_samples,
                              train_vals,
                              min_degree=1,
                              max_degree=3,
                              hcross_strength=1,
                              cv=10,
                              solver_type='lars',
                              verbosity=0):
    r"""
    Use cross validation to find the polynomial degree which best fits the data.
    A polynomial is constructed for each degree and the degree with the highest
    cross validation score is returned.
    
    Parameters
    ----------
    train_samples : np.ndarray (nvars, nsamples)
        The inputs of the function used to train the approximation

    train_vals : np.ndarray (nsamples, nqoi)
        The values of the function at ``train_samples``
    
    min_degree : integer
        The minimum degree to consider

    max_degree : integer
        The maximum degree to consider.
        All degrees in ``range(min_degree, max_degree+1)`` are considered

    hcross_strength : float
       The strength of the hyperbolic cross index set. hcross_strength must be 
       in (0,1]. A value of 1 produces total degree polynomials

    cv : integer
        The number of cross validation folds used to compute the cross 
        validation error

    solver_type : string
        The type of regression used to train the polynomial

        - 'lasso_lars'
        - 'lars'
        - 'lasso'
        - 'omp'

    verbosity : integer
        Controls the amount of information printed to screen

    Returns
    -------
    result : :class:`pyapprox.approximate.ApproximateResult`
         Result object with the following attributes

    approx : :class:`pyapprox.multivariate_polynomials.PolynomialChaosExpansion`
        The PCE approximation

    scores : np.ndarray (nqoi)
        The best cross validation score for each QoI

    degrees : np.ndarray (nqoi)
        The best degree for each QoI
    """
    coefs = []
    scores = []
    indices = []
    degrees = []
    indices_dict = dict()
    unique_indices = []
    nqoi = train_vals.shape[1]
    for ii in range(nqoi):
        if verbosity > 1:
            print(f'Approximating QoI: {ii}')
        pce_ii, score_ii, degree_ii = _cross_validate_pce_degree(
            pce, train_samples, train_vals[:, ii:ii + 1], min_degree,
            max_degree, hcross_strength, cv, solver_type, verbosity)
        coefs.append(pce_ii.get_coefficients())
        scores.append(score_ii)
        indices.append(pce_ii.get_indices())
        degrees.append(degree_ii)
        for index in indices[ii].T:
            key = hash_array(index)
            if key not in indices_dict:
                indices_dict[key] = len(unique_indices)
                unique_indices.append(index)

    unique_indices = np.array(unique_indices).T
    all_coefs = np.zeros((unique_indices.shape[1], nqoi))
    for ii in range(nqoi):
        for jj, index in enumerate(indices[ii].T):
            key = hash_array(index)
            all_coefs[indices_dict[key], ii] = coefs[ii][jj, 0]
    pce.set_indices(unique_indices)
    pce.set_coefficients(all_coefs)
    return ApproximateResult({
        'approx': pce,
        'scores': np.array(scores),
        'degrees': np.array(degrees)
    })
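
A hedged usage sketch for cross_validate_pce_degree. The PCE construction below assumes the pyapprox module layout of the version this code is taken from (IndependentMultivariateRandomVariable, AffineRandomVariableTransformation, define_poly_options_from_variable_transformation); import paths and helper names may differ in other releases.

import numpy as np
from scipy import stats
# Assumed pyapprox imports; the module layout varies between releases
from pyapprox.variables import IndependentMultivariateRandomVariable
from pyapprox.variable_transformations import AffineRandomVariableTransformation
from pyapprox.multivariate_polynomials import (
    PolynomialChaosExpansion, define_poly_options_from_variable_transformation)

# Two independent uniform variables on [-1, 1]
variable = IndependentMultivariateRandomVariable([stats.uniform(-1, 2)] * 2)
var_trans = AffineRandomVariableTransformation(variable)
pce = PolynomialChaosExpansion()
pce.configure(define_poly_options_from_variable_transformation(var_trans))

def fun(samples):
    # A smooth test function with a single QoI; returns shape (nsamples, 1)
    return np.cos(np.pi * samples.sum(axis=0))[:, None]

train_samples = np.random.uniform(-1, 1, (2, 100))
train_vals = fun(train_samples)

result = cross_validate_pce_degree(
    pce, train_samples, train_vals, min_degree=1, max_degree=5)
print(result.degrees, result.scores)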
Example #3
def expanding_basis_omp_pce(pce,
                            train_samples,
                            train_vals,
                            hcross_strength=1,
                            verbosity=1,
                            max_num_terms=None,
                            solver_type='lasso_lars',
                            cv=10,
                            restriction_tol=np.finfo(float).eps * 2):
    r"""
    Iteratively expand and restrict the polynomial basis and use 
    cross validation to find the best basis [JESJCP2015]_
    
    Parameters
    ----------
    train_samples : np.ndarray (nvars, nsamples)
        The inputs of the function used to train the approximation

    train_vals : np.ndarray (nsamples, nqoi)
        The values of the function at ``train_samples``
    
    hcross_strength : float
       The strength of the hyperbolic cross index set. hcross_strength must be
       in (0,1]. A value of 1 produces total degree polynomials

    max_num_terms : integer
        The maximum number of basis terms allowed while the basis is expanded

    cv : integer
        The number of cross validation folds used to compute the cross 
        validation error

    solver_type : string
        The type of regression used to train the polynomial

        - 'lasso_lars'
        - 'lars'
        - 'lasso'
        - 'omp'

    verbosity : integer
        Controls the amount of information printed to screen

    restriction_tol : float
        The tolerance used to prune inactive indices

    Returns
    -------
    result : :class:`pyapprox.approximate.ApproximateResult`
         Result object with the following attributes

    approx : :class:`pyapprox.multivariate_polynomials.PolynomialChaosExpansion`
        The PCE approximation

    scores : np.ndarray (nqoi)
        The best cross validation score for each QoI

    References
    ----------
    .. [JESJCP2015] `J.D. Jakeman, M.S. Eldred, and K. Sargsyan. Enhancing l1-minimization estimates of polynomial chaos expansions using basis selection. Journal of Computational Physics, 289(0):18 – 34, 2015 <https://doi.org/10.1016/j.jcp.2015.02.025>`_
    """
    coefs = []
    scores = []
    indices = []
    indices_dict = dict()
    unique_indices = []
    nqoi = train_vals.shape[1]
    for ii in range(nqoi):
        if verbosity > 1:
            print(f'Approximating QoI: {ii}')
        pce_ii, score_ii = _expanding_basis_omp_pce(pce, train_samples,
                                                    train_vals[:, ii:ii + 1],
                                                    hcross_strength, verbosity,
                                                    max_num_terms, solver_type,
                                                    cv, restriction_tol)
        coefs.append(pce_ii.get_coefficients())
        scores.append(score_ii)
        indices.append(pce_ii.get_indices())
        for index in indices[ii].T:
            key = hash_array(index)
            if key not in indices_dict:
                indices_dict[key] = len(unique_indices)
                unique_indices.append(index)

    unique_indices = np.array(unique_indices).T
    all_coefs = np.zeros((unique_indices.shape[1], nqoi))
    for ii in range(nqoi):
        for jj, index in enumerate(indices[ii].T):
            key = hash_array(index)
            all_coefs[indices_dict[key], ii] = coefs[ii][jj, 0]
    pce.set_indices(unique_indices)
    pce.set_coefficients(all_coefs)
    return ApproximateResult({'approx': pce, 'scores': np.array(scores)})
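
A short usage sketch, reusing the pce, train_samples and train_vals built in the sketch under Example #2. An hcross_strength below 1 biases the starting hyperbolic cross basis toward lower-order interactions before the expand/restrict iterations begin; the values below are illustrative only.

result = expanding_basis_omp_pce(
    pce, train_samples, train_vals,
    hcross_strength=0.5, max_num_terms=100, solver_type='lasso_lars', cv=10)
print(result.scores)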
Example #4
def approximate_fixed_pce(pce,
                          train_samples,
                          train_vals,
                          indices,
                          verbose=1,
                          solver_type='lasso',
                          linear_solver_options={}):
    r"""
    Estimate the coefficients of a polynomial chaos using regression methods
    and pre-specified (fixed) basis and regularization parameters

    Parameters
    ----------
    train_samples : np.ndarray (nvars, nsamples)
        The inputs of the function used to train the approximation

    train_vals : np.ndarray (nsamples, nqoi)
        The values of the function at ``train_samples``

    indices : np.ndarray (nvars, nindices)
        The multivariate indices representing each basis in the expansion.

    solver_type : string
        The type of regression used to train the polynomial

        - 'lasso'
        - 'lars'
        - 'lasso_grad'
        - 'omp'

    verbose : integer
        Controls the amount of information printed to screen

    Returns
    -------
    result : :class:`pyapprox.approximate.ApproximateResult`
         Result object with the following attributes

    approx : :class:`pyapprox.multivariate_polynomials.PolynomialChaosExpansion`
        The PCE approximation

    reg_params : np.ndarray (nqoi)
        The regularization parameters for each QoI.
    """
    nqoi = train_vals.shape[1]
    coefs = []
    reg_params = []
    if isinstance(linear_solver_options, dict):
        linear_solver_options = [linear_solver_options] * nqoi
    if isinstance(indices, np.ndarray):
        indices = [indices.copy() for ii in range(nqoi)]
    unique_indices = []
    indices_dict = dict()
    for ii in range(nqoi):
        pce.set_indices(indices[ii])
        basis_matrix = pce.basis_matrix(train_samples)
        coef_ii, _, reg_param_ii = fit_linear_model(
            basis_matrix, train_vals[:, ii:ii + 1], solver_type,
            **linear_solver_options[ii])
        coefs.append(coef_ii)
        # keep the regularization parameter so it can be returned per QoI
        reg_params.append(reg_param_ii)
        for index in indices[ii].T:
            key = hash_array(index)
            if key not in indices_dict:
                indices_dict[key] = len(unique_indices)
                unique_indices.append(index)

    unique_indices = np.array(unique_indices).T
    all_coefs = np.zeros((unique_indices.shape[1], nqoi))
    for ii in range(nqoi):
        for jj, index in enumerate(indices[ii].T):
            key = hash_array(index)
            all_coefs[indices_dict[key], ii] = coefs[ii][jj, 0]
    pce.set_indices(unique_indices)
    pce.set_coefficients(all_coefs)
    return ApproximateResult({'approx': pce,
                              'reg_params': np.array(reg_params)})
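
A hedged usage sketch with a fixed basis, again reusing the setup from the Example #2 sketch. compute_hyperbolic_indices is assumed to be the pyapprox helper that builds the multi-index set; its import path and signature may differ between releases.

# Assumed pyapprox import; path may vary between releases
from pyapprox.indexing import compute_hyperbolic_indices

# Total-degree basis (hcross_strength=1.0) of degree 3 in two variables
indices = compute_hyperbolic_indices(2, 3, 1.0)
result = approximate_fixed_pce(
    pce, train_samples, train_vals, indices,
    solver_type='lasso', linear_solver_options={})
print(result.approx.get_coefficients().shape)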