Example #1
0
 def diagnostics(self):
     """Produce diagnostic plots for the active subspace-enabled response surface.

     Shows the first 10 eigenvalues with bootstrap ranges, the estimated
     subspace errors, the first four eigenvector components, and a
     sufficient summary of f along the first two active variables.
     """
     subspaces = self.av_respsurf.avmap.domain.subspaces
     eigenvalues(subspaces.eigenvalues[:10, 0], e_br=subspaces.e_br[:10, :])
     subspace_errors(subspaces.sub_br[:10, :])
     eigenvectors(subspaces.eigenvectors[:, :4])
     # Project the inputs onto the first two active directions.
     active_coords = np.dot(self.X, subspaces.eigenvectors[:, :2])
     sufficient_summary(active_coords, self.f)
Example #2
0
 def diagnostics(self):
     """Produce diagnostic plots for the computed active subspace.

     Shows the eigenvalues with bootstrap ranges, the estimated subspace
     errors, the eigenvector components, and a sufficient summary of f
     along the first two active variables.
     """
     sub = self.subspaces
     eigenvalues(sub.eigenvalues, e_br=sub.e_br)
     subspace_errors(sub.sub_br)
     eigenvectors(sub.eigenvectors)
     # Project the inputs onto the first two active directions.
     active_coords = np.dot(self.X, sub.eigenvectors[:, :2])
     sufficient_summary(active_coords, self.f)
Example #3
0
    def diagnostics(self):
        """
        Make plots that help determine the quality of the active subspace-
        enabled response surface and approximation.

        **Notes**

        This method produces four useful plots for verifying the quality of the
        active subspace-enabled approximation.

        #. A semilog plot of the first 10 eigenvalues with their bootstrap ranges. \
        One is typically looking for large gaps between the\
        eigenvalues in the log space.
        #. A semilog plot of the estimated errors in the estimated active\
        subspace. This plot uses a bootstrap to estimate the errors.
        #. A plot of the components of the first four eigenvectors. These\
        components often reveal insights into the simulation's important\
        parameters.
        #. A 1d and a 2d summary plot of the computed quantity of interest at\
        different values of the first and second active variables. These plots\
        can be very useful in revealing the structure in the quantity of\
        interest as a function of the inputs.

        """
        # Subspace estimates computed for the response surface's active
        # variable domain.
        ss = self.as_respsurf.avmap.domain.subspaces
        eigenvalues(ss.eigenvalues[:10,0], e_br=ss.e_br[:10,:])
        subspace_errors(ss.sub_br[:10,:])
        eigenvectors(ss.eigenvectors[:,:4])
        # Project the inputs onto the first two active directions for the
        # 1d/2d sufficient summary plots.
        Y = np.dot(self.X, ss.eigenvectors[:,:2])
        sufficient_summary(Y, self.f)
Example #4
0
def linear_gradient_check(X, f, n_boot=1000, in_labels=None, out_label=None):
    r"""
    Use the normalized gradient of a global linear model to define the active
    subspace.

    :param ndarray X: M-by-m matrix containing points in the simulation input
        space.
    :param ndarray f: M-by-1 matrix containing the corresponding simulation
        outputs.
    :param int n_boot: The number of bootstrap replicates.
    :param str[] in_labels: Contains strings that label the input parameters.
    :param str out_label: String that labels the simulation output.

    :return: w, m-by-1 matrix that is the normalized gradient of
        the global linear model.
    :rtype: ndarray

    **See Also**

    sdr.quadratic_model_check

    **Notes**

    This is usually my first step when analyzing a new data set. It can be used
    to identify a one-dimensional active subspace under two conditions: (i) the
    simulation output is roughly a monotonic function of the inputs and (ii)
    the simulation output is well represented by f(x) \approx g(w^T x).

    The function produces the summary plot, which can verify these assumptions.

    It also plots the components of `w`, which often provide insight into the
    important parameters of the model.
    """
    # NOTE: raw docstring (r"""), otherwise "\approx" embeds the "\a" (BEL)
    # escape character in the rendered docs.

    M, m = X.shape
    w = _lingrad(X, f)

    # Bootstrap: recompute the linear-model gradient on resampled rows and
    # track componentwise lower/upper ranges of the weights.
    # The [-1, 1] initialization assumes the normalized gradient's
    # components lie in that interval (unit-norm w) -- TODO confirm
    # against _lingrad's normalization.
    ind = np.random.randint(M, size=(M, n_boot))
    w_lb, w_ub = np.ones((m, 1)), -np.ones((m, 1))
    for i in range(n_boot):
        w_boot = _lingrad(X[ind[:, i], :], f[ind[:, i]]).reshape((m, 1))
        # Elementwise range update replaces the original per-component loop.
        w_lb = np.minimum(w_lb, w_boot)
        w_ub = np.maximum(w_ub, w_boot)
    w_br = np.hstack((w_lb, w_ub))

    # Sufficient summary plot of f against the active variable y = X w.
    y = np.dot(X, w)
    sufficient_summary(y, f, out_label=out_label)

    # Plot the gradient components with their bootstrap ranges.
    eigenvectors(w, W_br=w_br, in_labels=in_labels, out_label=out_label)

    return w
Example #5
0
def quick_check(X, f, n_boot=1000, in_labels=None, out_label=None):
    """
    Use the normalized gradient of a global linear model to quickly check for
    a one-dimensional active subspace.

    :param ndarray X: M-by-m matrix containing points in the simulation input
        space.
    :param ndarray f: M-by-1 matrix containing the corresponding simulation
        outputs.
    :param int n_boot: The number of bootstrap replicates (default=1000).
    :param str[] in_labels: Contains strings that label the input parameters
        (default=None).
    :param str out_label: String that labels the simulation output
        (default=None).

    :return: w, m-by-1 matrix that is the normalized gradient of the global
        linear model.
    :rtype: ndarray
    """

    M, m = X.shape
    w = lingrad(X, f)

    # Bootstrap: recompute the linear-model gradient on resampled rows and
    # track componentwise lower/upper ranges of the weights.
    # The [-1, 1] initialization assumes the normalized gradient's
    # components lie in that interval (unit-norm w) -- TODO confirm
    # against lingrad's normalization.
    ind = np.random.randint(M, size=(M, n_boot))
    w_lb, w_ub = np.ones((m, 1)), -np.ones((m, 1))
    for i in range(n_boot):
        w_boot = lingrad(X[ind[:, i], :], f[ind[:, i]]).reshape((m, 1))
        # Elementwise range update replaces the original per-component loop.
        w_lb = np.minimum(w_lb, w_boot)
        w_ub = np.maximum(w_ub, w_boot)
    w_br = np.hstack((w_lb, w_ub))

    # Sufficient summary plot of f against the active variable y = X w.
    y = np.dot(X, w)
    sufficient_summary(y, f, out_label=out_label)

    # Plot the gradient components with their bootstrap ranges.
    eigenvectors(w, W_br=w_br, in_labels=in_labels, out_label=out_label)

    return w