def _regress_out_chunk(X, y):
    """
    Regress the covariates in ``X`` out of a single gene column ``y``
    via an ordinary least-squares fit (no intercept), returning the
    residuals.

    Parameters
    ----------

    X : cupy.ndarray of shape (n_cells, 3)
        Matrix of regressors

    y : cupy.sparse.spmatrix of shape (n_cells,)
        Sparse matrix containing a single column of the cellxgene matrix

    Returns
    -------

    dense_mat : cupy.ndarray of shape (n_cells,)
        Adjusted column
    """
    # Densify the sparse column so the solver can consume it.
    dense_col = y.todense()

    model = LinearRegression(fit_intercept=False, output_type="cupy")
    model.fit(X, dense_col, convert_dtype=True)

    # Residual = observed values minus the fitted linear effect, flattened
    # to a 1-D vector of length n_cells.
    n_cells = dense_col.shape[0]
    return dense_col.reshape(n_cells, ) - model.predict(X).reshape(n_cells)
def _regress_out_chunk(X, y):
    """
    Regress the covariates in ``X`` out of a single dense column ``y``.

    Fits an ordinary least-squares model (no intercept) of ``y`` on ``X``
    and returns the residuals, i.e. ``y`` with the fitted linear effect
    of the regressors removed.

    Parameters
    ----------
    X : array of shape (n_cells, n_regressors)
        Matrix of regressors.

    y : array of shape (n_cells,) or (n_cells, 1)
        A single dense column of the cellxgene matrix.

    Returns
    -------
    array of shape (n_cells,)
        Adjusted column.
    """
    lr = LinearRegression(fit_intercept=False)
    lr.fit(X, y, convert_dtype=True)
    # Residual: observed minus predicted, both flattened to 1-D.
    return y.reshape(y.shape[0], ) - lr.predict(X).reshape(y.shape[0])
def _regress_out_chunk(data_chunk, regressors):
    """
    Perform ``data_chunk.shape[1]`` local linear regressions, replacing
    each column of ``data_chunk`` in place with its regression residuals.

    Parameters
    ----------
    data_chunk : array of shape (n_cells, n_cols)
        Dense chunk of the cellxgene matrix. NOTE: modified in place.

    regressors : array of shape (n_cells, n_regressors)
        Matrix of covariates regressed out of every column.

    Returns
    -------
    data_chunk : array of shape (n_cells, n_cols)
        The same array, each column replaced by its residuals.
    """
    # The regressor matrix is identical for every column, so bind it once
    # outside the loop instead of rebinding per iteration.
    X = regressors

    for col in range(data_chunk.shape[1]):
        y = data_chunk[:, col]
        lr = LinearRegression(fit_intercept=False)
        lr.fit(X, y, convert_dtype=True)
        # Overwrite the column with observed minus fitted values.
        data_chunk[:, col] = y - lr.predict(X)

    return data_chunk
# Example 4
         model = regression.AutoSklearnRegressor(
             time_left_for_this_task=alg.task_time,
             per_run_time_limit=alg.run_time,
             resampling_strategy=alg.sampling_strategy,
             resampling_strategy_arguments={'folds': alg.folds}
         )
     else:
         model = regression.AutoSklearnRegressor(
             time_left_for_this_task=alg.task_time,
             per_run_time_limit=alg.run_time
         )
     warn_not_gpu_support(alg)
 elif alg.name == 'LinearRegression':
     if NVIDIA_RAPIDS_ENABLED:
         from cuml.linear_model import LinearRegression
         model = LinearRegression(**alg.input_variables.__dict__)
     else:
         from sklearn.linear_model import LinearRegression
         model = LinearRegression(**alg.input_variables.__dict__)
 elif alg.name == 'SupportVectorRegression':
     if NVIDIA_RAPIDS_ENABLED:
         from cuml.svm import SVR
     else:
         from sklearn.svm import SVR
     model = SVR(**alg.input_variables.__dict__)
 elif alg.name == 'BayesianRidgeRegression':
     from sklearn.linear_model import BayesianRidge
     model = BayesianRidge(**alg.input_variables.__dict__)
     warn_not_gpu_support(alg)
 elif alg.name == 'AdaBoost' and alg.type == 'regression':
     from sklearn.ensemble import AdaBoostRegressor
# Example 5
 def function(self):
     # Store a freshly constructed (unfitted) LinearRegression estimator
     # on the node's first output port.
     # NOTE(review): assumes ``self.out_1`` exists and exposes a ``val``
     # attribute -- confirm against the enclosing class / caller.
     self.out_1.val = LinearRegression()
def lr_gpu():
    """
    Fit a cuML LinearRegression on the module-level ``X`` and ``y``.

    Uses the eigendecomposition solver with an intercept and without
    normalization.

    Returns
    -------
    The fitted estimator. (Previously the fit result was assigned to a
    discarded local and the function returned ``None``, making the fit
    unusable by callers; returning the model is a backward-compatible
    additive change.)
    """
    # NOTE(review): relies on ``LRCUML``, ``X`` and ``y`` being defined
    # elsewhere in the file -- confirm.
    # Renamed the local: the original shadowed the function's own name.
    model = LRCUML(fit_intercept=True, normalize=False, algorithm="eig")
    model.fit(X, y)
    return model