def gs_BIKE_Ridge(A_list, yV, alphas_log=(1, -1, 9), X_concat=None, n_folds=5, n_jobs=-1):
    """Grid-search the Ridge ``alpha`` for a BIKE_Ridge model using K-fold CV.

    Parameters
    ----------
    A_list : list of ndarray
        List of similarity (kernel) matrices; each A shares the same first
        dimension (the number of molecules).
    yV : array-like
        Target values.
    alphas_log : tuple, optional
        Arguments forwarded to ``np.logspace`` defining the alpha grid.
    X_concat : ndarray or None, optional
        Concatenated linear descriptors; may be None when no X is used.
    n_folds : int, optional
        Number of cross-validation folds.
    n_jobs : int, optional
        Number of parallel jobs for the grid search.

    Returns
    -------
    GridSearchCV
        The fitted grid-search object.
    """
    clf = binary_model.BIKE_Ridge(A_list, X_concat)
    # Search alpha over a logarithmic grid.
    params = {'alpha': np.logspace(*alphas_log)}
    ln = A_list[0].shape[0]  # number of molecules
    # NOTE: ported from the removed sklearn.cross_validation/grid_search
    # modules to model_selection, matching the rest of this file.
    kf_n = model_selection.KFold(n_splits=n_folds, shuffle=True)
    gs = model_selection.GridSearchCV(clf, params, scoring='r2', cv=kf_n, n_jobs=n_jobs)
    # BIKE_Ridge indexes into the precomputed kernel matrices, so the "X"
    # handed to fit() is just a column vector of row indices.
    AX_idx = np.array([list(range(ln))]).T
    gs.fit(AX_idx, yV)
    return gs
def cv_BIKE_Ridge(A_list, yV, alpha=0.5, XX=None, n_splits=5, n_jobs=-1, grid_std=None):
    """Cross-validated prediction with a BIKE_Ridge model at a fixed alpha.

    Parameters
    ----------
    A_list : list of ndarray
        List of similarity (kernel) matrices; each A shares the same first
        dimension (the number of molecules).
    yV : array-like
        Target values.
    alpha : float, optional
        Ridge regularization strength.
    XX : ndarray or None, optional
        Concatenated linear descriptors; may be None when no X is used.
    n_splits : int, optional
        Number of cross-validation folds.
    n_jobs : int, optional
        Number of parallel jobs for cross_val_predict.
    grid_std : optional
        Forwarded to ``jutil.cv_show`` for plotting.

    Returns
    -------
    ndarray
        Out-of-fold predictions for every sample.
    """
    clf = binary_model.BIKE_Ridge(A_list, XX, alpha=alpha)
    ln = A_list[0].shape[0]  # number of molecules
    kf_n_c = model_selection.KFold(n_splits=n_splits, shuffle=True)
    # BUG FIX: the original called `kf5_ext_c.split(...)`, an undefined name
    # (NameError at runtime); split the KFold that was actually constructed.
    kf_n = kf_n_c.split(A_list[0])
    # BIKE_Ridge indexes into the precomputed kernel matrices, so the "X"
    # handed to cross_val_predict is just a column vector of row indices.
    AX_idx = np.array([list(range(ln))]).T
    yV_pred = model_selection.cross_val_predict(clf, AX_idx, yV, cv=kf_n, n_jobs=n_jobs)
    print('The prediction output using cross-validation is given by:')
    jutil.cv_show(yV, yV_pred, grid_std=grid_std)
    return yV_pred
def gs_Ridge_BIKE(A_list, yV, XX=None, alphas_log=(1, -1, 9), n_splits=5, n_jobs=-1):
    """Grid-search the Ridge ``alpha`` for a BIKE_Ridge model using K-fold CV.

    Parameters
    ----------
    A_list : list of ndarray
        List of similarity (kernel) matrices; each A shares the same first
        dimension (the number of molecules).
    yV : array-like
        Target values.
    XX : ndarray or None, optional
        Concatenated linear descriptors; may be None when no X is used.
    alphas_log : tuple, optional
        Arguments forwarded to ``np.logspace`` defining the alpha grid.
    n_splits : int, optional
        Number of cross-validation folds.
    n_jobs : int, optional
        Number of parallel jobs for the grid search.

    Returns
    -------
    GridSearchCV
        The fitted grid-search object.
    """
    clf = binary_model.BIKE_Ridge(A_list, XX)
    # Search alpha over a logarithmic grid (fixed `parmas` typo; removed
    # dead commented-out split() call).
    params = {'alpha': np.logspace(*alphas_log)}
    ln = A_list[0].shape[0]  # number of molecules
    kf_n_c = model_selection.KFold(n_splits=n_splits, shuffle=True)
    gs = model_selection.GridSearchCV(clf, params, scoring='r2', cv=kf_n_c, n_jobs=n_jobs)
    # BIKE_Ridge indexes into the precomputed kernel matrices, so the "X"
    # handed to fit() is just a column vector of row indices.
    AX_idx = np.array([list(range(ln))]).T
    gs.fit(AX_idx, yV)
    return gs