# --- Esempio n. 1 (Example 1) ---
def Kernelcv(alpha, gamma):
    """Optimization objective: negated cross-validated RMSE for an RBF
    kernel ridge regressor. ``alpha`` and ``gamma`` are searched on a
    log10 scale (the caller supplies exponents).
    NOTE(review): assumes ``cv_s`` is sklearn's cross_val_score and that
    ``traindata``/``target`` are module-level data — confirm in the full file.
    """
    model = KernelRidge(alpha=10 ** alpha, gamma=10 ** gamma, kernel='rbf')
    # The "mean_squared_error" scorer returns negated MSE, so flip the sign
    # before taking the square root, then negate again (larger is better).
    neg_mse = cv_s(model, traindata, target, "mean_squared_error", cv=10).mean()
    return -((-neg_mse) ** 0.5)
def xgbcv(max_depth,
          learning_rate,
          n_estimators,
          min_child_weight,
          gamma,
          subsample,
          colsample_bytree,
          reg_alpha,
          reg_lambda,
          silent=True):
    """Mean 4-fold "mean_squared_error" score for an XGBoost regressor.

    Continuous hyper-parameters are forwarded as-is; ``max_depth`` and
    ``n_estimators`` are truncated to int because XGBoost requires integers.
    """
    model = xgb.XGBRegressor(max_depth=int(max_depth),
                             learning_rate=learning_rate,
                             n_estimators=int(n_estimators),
                             silent=silent,
                             gamma=gamma,
                             min_child_weight=min_child_weight,
                             subsample=subsample,
                             colsample_bytree=colsample_bytree,
                             reg_alpha=reg_alpha,
                             reg_lambda=reg_lambda)
    scores = cv_s(model, train, target, "mean_squared_error", cv=4)
    return scores.mean()
def xgbcv(
          max_depth, 
          learning_rate, 
          n_estimators,
          min_child_weight,
          gamma,
          subsample,
          colsample_bytree,
          reg_alpha,
          silent=True,
          nthread=8):
    """Mean 4-fold accuracy of a multi-class XGBoost classifier
    (softprob objective) on ``train`` / ``outcome``."""
    model = XGBClassifier(max_depth=int(max_depth),          # must be int
                          learning_rate=learning_rate,
                          n_estimators=int(n_estimators),    # must be int
                          silent=silent,
                          nthread=nthread,
                          gamma=gamma,
                          reg_alpha=reg_alpha,
                          min_child_weight=min_child_weight,
                          subsample=subsample,
                          colsample_bytree=colsample_bytree,
                          objective='multi:softprob')
    scores = cv_s(model, train, outcome, "accuracy", cv=4)
    return scores.mean()
def xgbcv(
           max_depth, 
          learning_rate, 
          n_estimators,
          min_child_weight,
          gamma,
          subsample,
          colsample_bytree,
          silent=True,
          nthread=8):
    """Mean 4-fold "log_loss" score of a multi-class XGBoost classifier
    (softprob objective) evaluated on ``xd`` / ``yd``."""
    clf = xgb.XGBClassifier(max_depth=int(max_depth),
                            learning_rate=learning_rate,
                            n_estimators=int(n_estimators),
                            silent=silent,
                            nthread=nthread,
                            gamma=gamma,
                            min_child_weight=min_child_weight,
                            subsample=subsample,
                            colsample_bytree=colsample_bytree,
                            objective='multi:softprob')
    return cv_s(clf, xd, yd, "log_loss", cv=4).mean()
def xgbcv(max_depth, max_features, n_estimators, min_samples_leaf, n_jobs=-1):
    """Mean 4-fold "mean_squared_error" score of an ExtraTreesRegressor.

    Integer-valued hyper-parameters are truncated with int() because the
    Bayesian optimizer proposes floats.
    """
    # BUG FIX: n_jobs was accepted by the signature but never forwarded,
    # so the estimator always ran single-threaded. Forward it now
    # (default -1 = use all cores).
    model = ExtraTreesRegressor(max_depth=int(max_depth),
                                max_features=max_features,
                                n_estimators=int(n_estimators),
                                min_samples_leaf=int(min_samples_leaf),
                                n_jobs=n_jobs)
    return cv_s(model, train, target, "mean_squared_error", cv=4).mean()
# --- Esempio n. 6 (Example 6) ---
def xgbcv(max_depth, max_features, n_estimators, min_samples_leaf, n_jobs=-1):
    """Mean 4-fold "log_loss" score of an ExtraTreesClassifier on
    ``train`` vs. ``target['status_group']``."""
    # BUG FIX: n_jobs was accepted by the signature but never forwarded to
    # the estimator; forward it so fitting can parallelize (default -1).
    model = ExtraTreesClassifier(max_depth=int(max_depth),
                                 max_features=max_features,
                                 n_estimators=int(n_estimators),
                                 min_samples_leaf=int(min_samples_leaf),
                                 n_jobs=n_jobs)
    return cv_s(model, train, target['status_group'], "log_loss", cv=4).mean()
# --- Esempio n. 7 (Example 7) ---
def xgbcv(max_depth, max_features, n_estimators, min_samples_leaf, n_jobs=-1):
    """Mean 4-fold accuracy of a RandomForestClassifier on
    ``train`` vs. ``target['status_group']``."""
    # BUG FIX: n_jobs was accepted by the signature but never forwarded to
    # the estimator; forward it so fitting can parallelize (default -1).
    model = RandomForestClassifier(max_depth=int(max_depth),
                                   max_features=max_features,
                                   n_estimators=int(n_estimators),
                                   min_samples_leaf=int(min_samples_leaf),
                                   n_jobs=n_jobs)
    return cv_s(model, train, target['status_group'], "accuracy", cv=4).mean()
def rfcv(n_estimators, min_samples_split, max_features, min_samples_leaf):
    """Mean 4-fold accuracy of a gini ExtraTreesClassifier on ``cx``/``cy``.

    ``max_features`` is clamped to 0.999 so the optimizer can never request
    a fraction >= 1.0.
    """
    clf = ExtraTreesClassifier(n_estimators=int(n_estimators),
                               min_samples_split=int(min_samples_split),
                               max_features=min(max_features, 0.999),
                               min_samples_leaf=int(min_samples_leaf),
                               criterion="gini")
    scores = cv_s(clf, cx, cy, "accuracy", cv=4)
    return scores.mean()
def KNNcv(n_neighbors):
    """Mean 15-fold "mean_squared_error" score of a distance-weighted
    cosine-metric KNN regressor (brute-force search)."""
    knn = KNeighborsRegressor(n_neighbors=int(n_neighbors),
                              weights='distance',
                              algorithm='brute',
                              metric='cosine')
    return cv_s(knn, train, target, "mean_squared_error", cv=15).mean()
# --- Esempio n. 10 (Example 10) ---
def rfcv(n_estimators, min_samples_split, max_features, min_samples_leaf):
    """Mean 4-fold "log_loss" score of a gini RandomForestClassifier on
    ``xc``/``yc``. ``max_features`` is clamped below 1.0."""
    forest = RandomForestClassifier(n_estimators=int(n_estimators),
                                    min_samples_split=int(min_samples_split),
                                    max_features=min(max_features, 0.999),
                                    min_samples_leaf=int(min_samples_leaf),
                                    criterion="gini")
    return cv_s(forest, xc, yc, "log_loss", cv=4).mean()
# --- Esempio n. 11 (Example 11) ---
def Kernelcv(
          alpha_1,
          alpha_2,
          lambda_1,
          lambda_2):
    """Mean 4-fold "mean_squared_error" score of a BayesianRidge model
    (800 iterations) with the four Gamma-prior hyper-parameters supplied
    by the optimizer."""
    ridge = BayesianRidge(n_iter=800,
                          alpha_1=alpha_1,
                          alpha_2=alpha_2,
                          lambda_1=lambda_1,
                          lambda_2=lambda_2)
    scores = cv_s(ridge, train, target, "mean_squared_error", cv=4)
    return scores.mean()
# --- Esempio n. 12 (Example 12) ---
def xgbcv(max_depth,
          learning_rate,
          n_estimators,
          min_child_weight,
          gamma,
          reg_alpha,
          subsample=1,
          colsample_bytree=1,
          silent=True):
    """Mean 4-fold "mean_squared_error" score of an XGBoost linear-objective
    regressor on ``gpstrain`` / ``height``."""
    # BUG FIX: subsample, colsample_bytree and silent were accepted by the
    # signature but never forwarded, so the optimizer's proposals for them
    # had no effect. Forward them now; the defaults (1, 1) match XGBoost's
    # own defaults, so default-argument behavior is unchanged.
    model = xgb.XGBRegressor(max_depth=int(max_depth),
                             learning_rate=learning_rate,
                             n_estimators=int(n_estimators),
                             gamma=gamma,
                             reg_alpha=reg_alpha,
                             min_child_weight=min_child_weight,
                             subsample=subsample,
                             colsample_bytree=colsample_bytree,
                             silent=silent,
                             objective='reg:linear')
    return cv_s(model, gpstrain, height, "mean_squared_error", cv=4).mean()
# --- Esempio n. 13 (Example 13) ---
def xgbcv(max_depth,
          learning_rate,
          n_estimators,
          min_child_weight,
          gamma,
          subsample,
          colsample_bytree,
          reg_alpha,
          reg_lambda,
          silent=True):
    """Mean 4-fold accuracy of a multi-class (softmax) XGBoost classifier
    on ``train`` vs. ``target['status_group']``."""
    # BUG FIX: subsample, colsample_bytree, reg_lambda and silent were
    # required/accepted by the signature but never forwarded to the
    # estimator, so tuning them was a no-op. Forward all of them.
    model = xgb.XGBClassifier(max_depth=int(max_depth),
                              learning_rate=learning_rate,
                              n_estimators=int(n_estimators),
                              gamma=gamma,
                              reg_alpha=reg_alpha,
                              reg_lambda=reg_lambda,
                              min_child_weight=min_child_weight,
                              subsample=subsample,
                              colsample_bytree=colsample_bytree,
                              silent=silent,
                              objective='multi:softmax')
    return cv_s(model, train, target['status_group'], "accuracy", cv=4).mean()
# --- Esempio n. 14 (Example 14) ---
def xgbcv(max_depth,
          learning_rate,
          n_estimators,
          min_child_weight,
          gamma,
          subsample,
          colsample_bytree,
          silent=True,
          nthread=8):
    """Mean 4-fold "mean_squared_error" score of an XGBoost linear-objective
    regressor on ``train`` / ``target``."""
    # BUG FIX: nthread was accepted by the signature but never forwarded,
    # so the requested thread count was ignored. Forward it now.
    model = xgb.XGBRegressor(max_depth=int(max_depth),
                             learning_rate=learning_rate,
                             n_estimators=int(n_estimators),
                             silent=silent,
                             nthread=nthread,
                             gamma=gamma,
                             min_child_weight=min_child_weight,
                             subsample=subsample,
                             colsample_bytree=colsample_bytree,
                             objective='reg:linear')
    return cv_s(model, train, target, "mean_squared_error", cv=4).mean()
def Kernelcv(alpha):
    """Mean 15-fold "mean_squared_error" score of a Lasso model whose
    regularization strength is searched on a log10 scale."""
    lasso = Lasso(alpha=10 ** alpha)
    scores = cv_s(lasso, train, target, "mean_squared_error", cv=15)
    return scores.mean()
# --- Esempio n. 16 (Example 16) ---
def lrcv(C):
    """Mean 4-fold "log_loss" score of a LogisticRegression whose inverse
    regularization strength C is searched on a log10 scale."""
    clf = LogisticRegression(C=10 ** C)
    scores = cv_s(clf, train, target['status_group'], "log_loss", cv=4)
    return scores.mean()
def Kernelcv(alpha, gamma):
    """Mean 4-fold "mean_squared_error" score of an RBF kernel ridge model;
    alpha and gamma are searched on a log10 scale."""
    model = KernelRidge(alpha=10 ** alpha, gamma=10 ** gamma, kernel='rbf')
    return cv_s(model, train, target, "mean_squared_error", cv=4).mean()