def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True,
             objective="reg:linear", booster='gbtree', n_jobs=1, nthread=None,
             gamma=0, min_child_weight=1, max_delta_step=0, subsample=1,
             colsample_bytree=1, colsample_bylevel=1, reg_alpha=0, reg_lambda=1,
             scale_pos_weight=1, base_score=0.5, random_state=0, seed=None,
             missing=None, **kwargs):
    """Wrapper around ``xgboost.XGBRegressor`` mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to ``xgboost.XGBRegressor``;
    extra keyword arguments are passed through via ``**kwargs``.
    """
    # warnings.filterwarnings(module='sklearn*', action='ignore', category=DeprecationWarning)
    # Forward by keyword: XGBRegressor's positional parameter order has changed
    # across xgboost releases, so positional forwarding can silently misassign.
    xgb.XGBRegressor.__init__(
        self, max_depth=max_depth, learning_rate=learning_rate,
        n_estimators=n_estimators, silent=silent, objective=objective,
        booster=booster, n_jobs=n_jobs, nthread=nthread, gamma=gamma,
        min_child_weight=min_child_weight, max_delta_step=max_delta_step,
        subsample=subsample, colsample_bytree=colsample_bytree,
        colsample_bylevel=colsample_bylevel, reg_alpha=reg_alpha,
        reg_lambda=reg_lambda, scale_pos_weight=scale_pos_weight,
        base_score=base_score, random_state=random_state, seed=seed,
        missing=missing, **kwargs)
    BaseWrapperReg.__init__(self)
def __init__(self, n_estimators=10, criterion="mse", max_depth=None,
             min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.,
             max_features="auto", max_leaf_nodes=None, min_impurity_decrease=0.,
             min_impurity_split=None, bootstrap=True, oob_score=False, n_jobs=1,
             random_state=None, verbose=0, warm_start=False):
    """Wrapper around sklearn's RandomForestRegressor mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``n_estimators`` is coerced to
    int (hyper-parameter search may supply floats) and ``n_jobs`` is forced to 4.
    """
    # NOTE(review): this deliberately overrides the caller-supplied n_jobs with
    # a hardcoded 4 — confirm this is intended, as the parameter is ignored.
    n_jobs = 4
    n_estimators = int(n_estimators)
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _RandomForestRegressor.__init__(
        self, n_estimators=n_estimators, criterion=criterion, max_depth=max_depth,
        min_samples_split=min_samples_split, min_samples_leaf=min_samples_leaf,
        min_weight_fraction_leaf=min_weight_fraction_leaf,
        max_features=max_features, max_leaf_nodes=max_leaf_nodes,
        min_impurity_decrease=min_impurity_decrease,
        min_impurity_split=min_impurity_split, bootstrap=bootstrap,
        oob_score=oob_score, n_jobs=n_jobs, random_state=random_state,
        verbose=verbose, warm_start=warm_start)
    BaseWrapperReg.__init__(self)
def __init__(self, criterion="mse", splitter="best", max_depth=None,
             min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.,
             max_features=None, random_state=None, max_leaf_nodes=None,
             min_impurity_decrease=0., min_impurity_split=None, presort=False):
    """Wrapper around sklearn's DecisionTreeRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _DecisionTreeRegressor.__init__(
        self, criterion=criterion, splitter=splitter, max_depth=max_depth,
        min_samples_split=min_samples_split, min_samples_leaf=min_samples_leaf,
        min_weight_fraction_leaf=min_weight_fraction_leaf,
        max_features=max_features, random_state=random_state,
        max_leaf_nodes=max_leaf_nodes,
        min_impurity_decrease=min_impurity_decrease,
        min_impurity_split=min_impurity_split, presort=presort)
    BaseWrapperReg.__init__(self)
def __init__(self, alpha=1.0, l1_ratio=0.5, fit_intercept=True, normalize=False,
             precompute=False, max_iter=1000, copy_X=True, tol=1e-4,
             warm_start=False, positive=False, random_state=None,
             selection='cyclic'):
    """Wrapper around sklearn's ElasticNet regressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _ElasticNetReg.__init__(
        self, alpha=alpha, l1_ratio=l1_ratio, fit_intercept=fit_intercept,
        normalize=normalize, precompute=precompute, max_iter=max_iter,
        copy_X=copy_X, tol=tol, warm_start=warm_start, positive=positive,
        random_state=random_state, selection=selection)
    BaseWrapperReg.__init__(self)
def __init__(self, criterion="mse", splitter="random", max_depth=None,
             min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.,
             max_features="auto", random_state=None, min_impurity_decrease=0.,
             min_impurity_split=None, max_leaf_nodes=None):
    """Wrapper around sklearn's ExtraTreeRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _ExtraTreeRegressor.__init__(
        self, criterion=criterion, splitter=splitter, max_depth=max_depth,
        min_samples_split=min_samples_split, min_samples_leaf=min_samples_leaf,
        min_weight_fraction_leaf=min_weight_fraction_leaf,
        max_features=max_features, random_state=random_state,
        min_impurity_decrease=min_impurity_decrease,
        min_impurity_split=min_impurity_split, max_leaf_nodes=max_leaf_nodes)
    BaseWrapperReg.__init__(self)
def __init__(self, loss='ls', learning_rate=0.1, n_estimators=100, subsample=1.0,
             criterion='friedman_mse', min_samples_split=2, min_samples_leaf=1,
             min_weight_fraction_leaf=0., max_depth=3, min_impurity_decrease=0.,
             min_impurity_split=None, init=None, random_state=None,
             max_features=None, alpha=0.9, verbose=0, max_leaf_nodes=None,
             warm_start=False, presort='auto'):
    """Wrapper around sklearn's GradientBoostingRegressor mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``n_estimators`` is coerced to
    int (hyper-parameter search may supply floats).
    """
    n_estimators = int(n_estimators)
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _GradientBoostingRegressor.__init__(
        self, loss=loss, learning_rate=learning_rate, n_estimators=n_estimators,
        subsample=subsample, criterion=criterion,
        min_samples_split=min_samples_split, min_samples_leaf=min_samples_leaf,
        min_weight_fraction_leaf=min_weight_fraction_leaf, max_depth=max_depth,
        min_impurity_decrease=min_impurity_decrease,
        min_impurity_split=min_impurity_split, init=init,
        random_state=random_state, max_features=max_features, alpha=alpha,
        verbose=verbose, max_leaf_nodes=max_leaf_nodes, warm_start=warm_start,
        presort=presort)
    BaseWrapperReg.__init__(self)
def __init__(self, eps=1e-3, n_alphas=100, alphas=None, fit_intercept=True,
             normalize=False, precompute='auto', max_iter=1000, tol=1e-4,
             copy_X=True, cv=None, verbose=False, n_jobs=1, positive=False,
             random_state=None, selection='cyclic'):
    """Wrapper around sklearn's LassoCV regressor mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``n_alphas`` is coerced to int
    (hyper-parameter search may supply floats).
    """
    n_alphas = int(n_alphas)
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _LassoCVReg.__init__(
        self, eps=eps, n_alphas=n_alphas, alphas=alphas,
        fit_intercept=fit_intercept, normalize=normalize, precompute=precompute,
        max_iter=max_iter, tol=tol, copy_X=copy_X, cv=cv, verbose=verbose,
        n_jobs=n_jobs, positive=positive, random_state=random_state,
        selection=selection)
    BaseWrapperReg.__init__(self)
def __init__(self, y_min=None, y_max=None, increasing=True, out_of_bounds='nan'):
    """Wrapper around sklearn's IsotonicRegression mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _IsotonicRegression.__init__(self, y_min=y_min, y_max=y_max,
                                 increasing=increasing,
                                 out_of_bounds=out_of_bounds)
    BaseWrapperReg.__init__(self)
def __init__(self, regressors, meta_regressor, verbose=0,
             store_train_meta_features=False, refit=True):
    """Wrapper around mlxtend's StackingRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword so a signature reordering upstream cannot silently
    # misassign values.
    _StackingRegressor.__init__(
        self, regressors=regressors, meta_regressor=meta_regressor,
        verbose=verbose, store_train_meta_features=store_train_meta_features,
        refit=refit)
    BaseWrapperReg.__init__(self)
def __init__(self, hidden_layer_sizes=(100,), activation="relu", solver='adam',
             alpha=0.0001, batch_size='auto', learning_rate="constant",
             learning_rate_init=0.001, power_t=0.5, max_iter=1000, shuffle=True,
             random_state=None, tol=1e-4, verbose=False, warm_start=False,
             momentum=0.9, nesterovs_momentum=True, early_stopping=False,
             validation_fraction=0.1, beta_1=0.9, beta_2=0.999, epsilon=1e-8):
    """Wrapper around sklearn's MLPRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _MLPRegressor.__init__(
        self, hidden_layer_sizes=hidden_layer_sizes, activation=activation,
        solver=solver, alpha=alpha, batch_size=batch_size,
        learning_rate=learning_rate, learning_rate_init=learning_rate_init,
        power_t=power_t, max_iter=max_iter, shuffle=shuffle,
        random_state=random_state, tol=tol, verbose=verbose,
        warm_start=warm_start, momentum=momentum,
        nesterovs_momentum=nesterovs_momentum, early_stopping=early_stopping,
        validation_fraction=validation_fraction, beta_1=beta_1, beta_2=beta_2,
        epsilon=epsilon)
    BaseWrapperReg.__init__(self)
def __init__(self, epsilon=1.35, max_iter=100, alpha=0.0001, warm_start=False,
             fit_intercept=True, tol=1e-05):
    """Wrapper around sklearn's HuberRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _HuberRegressor.__init__(self, epsilon=epsilon, max_iter=max_iter,
                             alpha=alpha, warm_start=warm_start,
                             fit_intercept=fit_intercept, tol=tol)
    BaseWrapperReg.__init__(self)
def __init__(self, alpha=1, kernel="linear", gamma=None, degree=3, coef0=1,
             kernel_params=None):
    """Wrapper around sklearn's KernelRidge mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _KernelRidge.__init__(self, alpha=alpha, kernel=kernel, gamma=gamma,
                          degree=degree, coef0=coef0,
                          kernel_params=kernel_params)
    BaseWrapperReg.__init__(self)
def __init__(self, alphas=(0.1, 1.0, 10.0), fit_intercept=True, normalize=False,
             scoring=None, cv=None, gcv_mode=None, store_cv_values=False):
    """Wrapper around sklearn's RidgeCV regressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _RidgeCVReg.__init__(self, alphas=alphas, fit_intercept=fit_intercept,
                         normalize=normalize, scoring=scoring, cv=cv,
                         gcv_mode=gcv_mode, store_cv_values=store_cv_values)
    BaseWrapperReg.__init__(self)
def __init__(self, alpha=1.0, fit_intercept=True, normalize=False, copy_X=True,
             max_iter=None, tol=1e-3, solver="auto", random_state=None):
    """Wrapper around sklearn's Ridge regressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _RidgeReg.__init__(self, alpha=alpha, fit_intercept=fit_intercept,
                       normalize=normalize, copy_X=copy_X, max_iter=max_iter,
                       tol=tol, solver=solver, random_state=random_state)
    BaseWrapperReg.__init__(self)
def __init__(self, kernel=None, alpha=1e-10, optimizer="fmin_l_bfgs_b",
             n_restarts_optimizer=0, normalize_y=False, copy_X_train=True,
             random_state=None):
    """Wrapper around sklearn's GaussianProcessRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _GaussianProcessRegressor.__init__(
        self, kernel=kernel, alpha=alpha, optimizer=optimizer,
        n_restarts_optimizer=n_restarts_optimizer, normalize_y=normalize_y,
        copy_X_train=copy_X_train, random_state=random_state)
    BaseWrapperReg.__init__(self)
def __init__(self, regressors, meta_regressor, cv=5, shuffle=True,
             use_features_in_secondary=False, store_train_meta_features=False,
             refit=True):
    """Wrapper around mlxtend's StackingCVRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword so a signature reordering upstream cannot silently
    # misassign values.
    _StackingCVRegressor.__init__(
        self, regressors=regressors, meta_regressor=meta_regressor, cv=cv,
        shuffle=shuffle, use_features_in_secondary=use_features_in_secondary,
        store_train_meta_features=store_train_meta_features, refit=refit)
    BaseWrapperReg.__init__(self)
def __init__(self, radius=1.0, weights='uniform', algorithm='auto', leaf_size=30,
             p=2, metric='minkowski', metric_params=None, **kwargs):
    """Wrapper around sklearn's RadiusNeighborsRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged; extra keyword arguments are passed
    through via ``**kwargs``.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _RadiusNeighborsRegressor.__init__(
        self, radius=radius, weights=weights, algorithm=algorithm,
        leaf_size=leaf_size, p=p, metric=metric, metric_params=metric_params,
        **kwargs)
    BaseWrapperReg.__init__(self)
def __init__(self, fit_intercept=True, copy_X=True, max_subpopulation=1e4,
             n_subsamples=None, max_iter=300, tol=1.e-3, random_state=None,
             n_jobs=1, verbose=False):
    """Wrapper around sklearn's TheilSenRegressor mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``max_iter`` is coerced to int
    (hyper-parameter search may supply floats).
    """
    max_iter = int(max_iter)
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _TheilSenRegressor.__init__(
        self, fit_intercept=fit_intercept, copy_X=copy_X,
        max_subpopulation=max_subpopulation, n_subsamples=n_subsamples,
        max_iter=max_iter, tol=tol, random_state=random_state, n_jobs=n_jobs,
        verbose=verbose)
    BaseWrapperReg.__init__(self)
def __init__(self, eta=0.01, epochs=50, minibatches=None, random_seed=None,
             print_progress=0):
    """Wrapper around mlxtend's LinearRegression mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``epochs`` is coerced to int
    (hyper-parameter search may supply floats). Also silences mlxtend's
    FutureWarnings.
    """
    epochs = int(epochs)
    # Forward by keyword so a signature reordering upstream cannot silently
    # misassign values.
    _LinearRegression.__init__(self, eta=eta, epochs=epochs,
                               minibatches=minibatches,
                               random_seed=random_seed,
                               print_progress=print_progress)
    BaseWrapperReg.__init__(self)
    warnings.filterwarnings(module='mlxtend*', action='ignore',
                            category=FutureWarning)
def __init__(self, alpha=1.0, fit_intercept=True, verbose=False, normalize=True,
             precompute='auto', max_iter=500, eps=np.finfo(float).eps,
             copy_X=True, fit_path=True, positive=False):
    """Wrapper around sklearn's LassoLars regressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # NOTE: default for ``eps`` uses np.finfo(float) — the old np.float alias
    # was removed in NumPy 1.20+; float gives the identical float64 epsilon.
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _LassoLars.__init__(self, alpha=alpha, fit_intercept=fit_intercept,
                        verbose=verbose, normalize=normalize,
                        precompute=precompute, max_iter=max_iter, eps=eps,
                        copy_X=copy_X, fit_path=fit_path, positive=positive)
    BaseWrapperReg.__init__(self)
def __init__(self, n_iter=300, tol=1.e-3, alpha_1=1.e-6, alpha_2=1.e-6,
             lambda_1=1.e-6, lambda_2=1.e-6, compute_score=False,
             fit_intercept=True, normalize=False, copy_X=True, verbose=False):
    """Wrapper around sklearn's BayesianRidge regressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _BayesianRidgeReg.__init__(
        self, n_iter=n_iter, tol=tol, alpha_1=alpha_1, alpha_2=alpha_2,
        lambda_1=lambda_1, lambda_2=lambda_2, compute_score=compute_score,
        fit_intercept=fit_intercept, normalize=normalize, copy_X=copy_X,
        verbose=verbose)
    BaseWrapperReg.__init__(self)
def __init__(self, n_iter=300, tol=1.e-3, alpha_1=1.e-6, alpha_2=1.e-6,
             lambda_1=1.e-6, lambda_2=1.e-6, compute_score=False,
             threshold_lambda=1.e+4, fit_intercept=True, normalize=False,
             copy_X=True, verbose=False):
    """Wrapper around sklearn's ARDRegression mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _ARDRegression.__init__(
        self, n_iter=n_iter, tol=tol, alpha_1=alpha_1, alpha_2=alpha_2,
        lambda_1=lambda_1, lambda_2=lambda_2, compute_score=compute_score,
        threshold_lambda=threshold_lambda, fit_intercept=fit_intercept,
        normalize=normalize, copy_X=copy_X, verbose=verbose)
    BaseWrapperReg.__init__(self)
def __init__(self, C=1.0, fit_intercept=True, max_iter=1000, tol=None,
             shuffle=True, verbose=0, loss="epsilon_insensitive",
             epsilon=DEFAULT_EPSILON, random_state=None, warm_start=False,
             average=False, n_iter=None):
    """Wrapper around sklearn's PassiveAggressiveRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _PassiveAggressiveRegressor.__init__(
        self, C=C, fit_intercept=fit_intercept, max_iter=max_iter, tol=tol,
        shuffle=shuffle, verbose=verbose, loss=loss, epsilon=epsilon,
        random_state=random_state, warm_start=warm_start, average=average,
        n_iter=n_iter)
    BaseWrapperReg.__init__(self)
def __init__(self, base_estimator=None, n_estimators=10, max_samples=1.0,
             max_features=1.0, bootstrap=True, bootstrap_features=False,
             oob_score=False, warm_start=False, n_jobs=1, random_state=None,
             verbose=0):
    """Wrapper around sklearn's BaggingRegressor mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``n_estimators`` is coerced to
    int (hyper-parameter search may supply floats).
    """
    n_estimators = int(n_estimators)
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _BaggingRegressor.__init__(
        self, base_estimator=base_estimator, n_estimators=n_estimators,
        max_samples=max_samples, max_features=max_features, bootstrap=bootstrap,
        bootstrap_features=bootstrap_features, oob_score=oob_score,
        warm_start=warm_start, n_jobs=n_jobs, random_state=random_state,
        verbose=verbose)
    BaseWrapperReg.__init__(self)
def __init__(self, base_estimator=None, min_samples=None,
             residual_threshold=None, is_data_valid=None, is_model_valid=None,
             max_trials=100, max_skips=np.inf, stop_n_inliers=np.inf,
             stop_score=np.inf, stop_probability=0.99, residual_metric=None,
             loss='absolute_loss', random_state=None):
    """Wrapper around sklearn's RANSACRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _RANSACRegressor.__init__(
        self, base_estimator=base_estimator, min_samples=min_samples,
        residual_threshold=residual_threshold, is_data_valid=is_data_valid,
        is_model_valid=is_model_valid, max_trials=max_trials,
        max_skips=max_skips, stop_n_inliers=stop_n_inliers,
        stop_score=stop_score, stop_probability=stop_probability,
        residual_metric=residual_metric, loss=loss, random_state=random_state)
    BaseWrapperReg.__init__(self)
def __init__(self, penalty='l2', dual=False, tol=1e-4, C=1.0,
             fit_intercept=True, intercept_scaling=1, class_weight=None,
             random_state=None, solver='liblinear', max_iter=100,
             multi_class='ovr', verbose=0, warm_start=False, n_jobs=1):
    """Wrapper around sklearn's LogisticRegression mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # NOTE(review): LogisticRegression is a classifier but is mixed with the
    # regression wrapper base here — confirm this is intentional.
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _LogisticRegression.__init__(
        self, penalty=penalty, dual=dual, tol=tol, C=C,
        fit_intercept=fit_intercept, intercept_scaling=intercept_scaling,
        class_weight=class_weight, random_state=random_state, solver=solver,
        max_iter=max_iter, multi_class=multi_class, verbose=verbose,
        warm_start=warm_start, n_jobs=n_jobs)
    BaseWrapperReg.__init__(self)
def __init__(self, boosting_type="gbdt", num_leaves=31, max_depth=-1,
             learning_rate=0.1, n_estimators=100, subsample_for_bin=200000,
             objective=None, class_weight=None, min_split_gain=0.,
             min_child_weight=1e-3, min_child_samples=20, subsample=1.,
             subsample_freq=0, colsample_bytree=1., reg_alpha=0., reg_lambda=0.,
             random_state=None, n_jobs=-1, silent=True, **kwargs):
    """Wrapper around ``lightgbm.LGBMRegressor`` mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``num_leaves`` and
    ``min_child_samples`` are coerced to int (hyper-parameter search may
    supply floats). Forces ``verbose=-1`` and silences library warnings.
    """
    kwargs['verbose'] = -1
    warnings.filterwarnings(module='sklearn*', action='ignore',
                            category=DeprecationWarning)
    warnings.filterwarnings(module='lightgbm*', action='ignore',
                            category=UserWarning)
    num_leaves = int(num_leaves)
    min_child_samples = int(min_child_samples)
    # Forward by keyword: LGBMRegressor's positional parameter order has
    # changed across lightgbm releases, so positional forwarding is fragile.
    lightgbm.LGBMRegressor.__init__(
        self, boosting_type=boosting_type, num_leaves=num_leaves,
        max_depth=max_depth, learning_rate=learning_rate,
        n_estimators=n_estimators, subsample_for_bin=subsample_for_bin,
        objective=objective, class_weight=class_weight,
        min_split_gain=min_split_gain, min_child_weight=min_child_weight,
        min_child_samples=min_child_samples, subsample=subsample,
        subsample_freq=subsample_freq, colsample_bytree=colsample_bytree,
        reg_alpha=reg_alpha, reg_lambda=reg_lambda, random_state=random_state,
        n_jobs=n_jobs, silent=silent, **kwargs)
    BaseWrapperReg.__init__(self)
def __init__(self, loss="squared_loss", penalty="l2", alpha=0.0001,
             l1_ratio=0.15, fit_intercept=True, max_iter=1000, tol=None,
             shuffle=True, verbose=0, epsilon=DEFAULT_EPSILON, random_state=None,
             learning_rate="invscaling", eta0=0.01, power_t=0.25,
             warm_start=False, average=False, n_iter=None):
    """Wrapper around sklearn's SGDRegressor mixing in ``BaseWrapperReg``.

    All parameters are forwarded unchanged to the parent constructor.
    """
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _SGDRegressor.__init__(
        self, loss=loss, penalty=penalty, alpha=alpha, l1_ratio=l1_ratio,
        fit_intercept=fit_intercept, max_iter=max_iter, tol=tol,
        shuffle=shuffle, verbose=verbose, epsilon=epsilon,
        random_state=random_state, learning_rate=learning_rate, eta0=eta0,
        power_t=power_t, warm_start=warm_start, average=average, n_iter=n_iter)
    BaseWrapperReg.__init__(self)
def __init__(self, base_estimator=None, n_estimators=50, learning_rate=1.,
             loss='linear', random_state=None):
    """Wrapper around sklearn's AdaBoostRegressor mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``n_estimators`` is coerced to
    int (hyper-parameter search may supply floats).
    """
    n_estimators = int(n_estimators)
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _AdaBoostRegressor.__init__(
        self, base_estimator=base_estimator, n_estimators=n_estimators,
        learning_rate=learning_rate, loss=loss, random_state=random_state)
    BaseWrapperReg.__init__(self)
def __init__(self, n_neighbors=5, weights='uniform', algorithm='auto',
             leaf_size=30, p=2, metric='minkowski', metric_params=None,
             n_jobs=1, **kwargs):
    """Wrapper around sklearn's KNeighborsRegressor mixing in ``BaseWrapperReg``.

    Parameters are forwarded unchanged, except ``n_jobs`` is forced to 4;
    extra keyword arguments are passed through via ``**kwargs``.
    """
    # NOTE(review): this deliberately overrides the caller-supplied n_jobs with
    # a hardcoded 4 — confirm this is intended, as the parameter is ignored.
    n_jobs = 4
    # Forward by keyword: sklearn estimator __init__ params are keyword-only
    # since 1.0 (SLEP009); positional forwarding breaks on modern versions.
    _KNeighborsRegressor.__init__(
        self, n_neighbors=n_neighbors, weights=weights, algorithm=algorithm,
        leaf_size=leaf_size, p=p, metric=metric, metric_params=metric_params,
        n_jobs=n_jobs, **kwargs)
    BaseWrapperReg.__init__(self)