def createSVMEpsRbfClassifier(params=None):
    info("Creating SVM Epsilon Rbf Classifier", ind=4)

    ## Params
    params = mergeParams(SVC(), params)
    kernel = 'rbf'
    tuneParams = getSVMEpsClassifierParams(kernel)
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    C = setParam('C', params, grid, force=False)
    info("Param: C = {0}".format(C), ind=6)
    info("Param: kernel = {0}".format(kernel), ind=6)
    gamma = setParam('gamma', params, grid, force=False)
    info("Param: gamma = {0}".format(gamma), ind=6)
    probability = True
    info("Param: probability = {0}".format(probability), ind=6)

    ## Estimator
    clf = SVC(C=C, probability=probability, gamma=gamma, kernel=kernel)

    return {"estimator": clf, "params": tuneParams}
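## Hypothetical usage sketch (not part of the original module): shows how a
## factory's return value could feed scikit-learn's GridSearchCV. It assumes
## tuneParams['grid'] is a scikit-learn-style parameter grid (a dict mapping
## parameter names to candidate values) and that the helpers tolerate the
## params=None default; neither is confirmed by this file alone.
def _demoSVMEpsRbfClassifierUsage():
    from sklearn.datasets import make_classification
    from sklearn.model_selection import GridSearchCV

    X, y = make_classification(n_samples=200, n_features=10, random_state=0)

    result = createSVMEpsRbfClassifier()  # {"estimator": ..., "params": ...}
    search = GridSearchCV(result["estimator"],
                          param_grid=result["params"]["grid"],
                          cv=3, scoring="accuracy")
    search.fit(X, y)
    print(search.best_params_)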
def createSVMNuRbfClassifier(params=None):
    info("Creating SVM Nu Rbf Classifier", ind=4)

    ## Params
    params = mergeParams(NuSVC(), params)
    kernel = 'rbf'
    tuneParams = getSVMNuClassifierParams(kernel)
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    nu = setParam('nu', params, grid, force=False)
    info("Param: nu = {0}".format(nu), ind=6)
    info("Param: kernel = {0}".format(kernel), ind=6)
    gamma = setParam('gamma', params, grid, force=False)
    info("Param: gamma = {0}".format(gamma), ind=6)
    probability = True
    info("Param: probability = {0}".format(probability), ind=6)

    ## Estimator
    clf = NuSVC(kernel=kernel, nu=nu, probability=probability, gamma=gamma)

    return {"estimator": clf, "params": tuneParams}
def createPassiveAggressiveClassifier(params):
    info("Creating Passive Aggressive Classifier", ind=4)
    error("Does not give probabilities.")
    return {"estimator": None, "params": None}

    ## NOTE: the code below is unreachable; it is retained so the estimator
    ## can be re-enabled if probability output is no longer required.
    ## Params
    params = mergeParams(PassiveAggressiveClassifier(), params)
    tuneParams = getPassiveAggressiveClassifierParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    C = setParam('C', params, grid, force=False)
    info("Param: C = {0}".format(C), ind=6)
    loss = setParam('loss', params, grid, force=False)
    info("Param: loss = {0}".format(loss), ind=6)
    max_iter = setParam('max_iter', params, grid, force=False)
    info("Param: max_iter = {0}".format(max_iter), ind=6)
    tol = setParam('tol', params, grid, force=False)
    info("Param: tol = {0}".format(tol), ind=6)

    ## Estimator
    clf = PassiveAggressiveClassifier(C=C, loss=loss, max_iter=max_iter,
                                      tol=tol)

    return {"estimator": clf, "params": tuneParams}
def createSGDRegressor(params):
    info("Creating SGD Regressor", ind=4)

    ## Params
    params = mergeParams(SGDRegressor(), params)
    tuneParams = getSGDRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)
    epsilon = setParam('epsilon', params, grid, force=False)
    info("Param: epsilon = {0}".format(epsilon), ind=6)
    eta0 = setParam('eta0', params, grid, force=False)
    info("Param: eta0 = {0}".format(eta0), ind=6)
    l1_ratio = setParam('l1_ratio', params, grid, force=False)
    info("Param: l1_ratio = {0}".format(l1_ratio), ind=6)
    learning_rate = setParam('learning_rate', params, grid, force=False)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)
    loss = setParam('loss', params, grid, force=False)
    info("Param: loss = {0}".format(loss), ind=6)
    max_iter = setParam('max_iter', params, grid, force=False)
    info("Param: max_iter = {0}".format(max_iter), ind=6)
    penalty = setParam('penalty', params, grid, force=False)
    info("Param: penalty = {0}".format(penalty), ind=6)
    power_t = setParam('power_t', params, grid, force=False)
    info("Param: power_t = {0}".format(power_t), ind=6)
    tol = setParam('tol', params, grid, force=False)
    info("Param: tol = {0}".format(tol), ind=6)

    ## Estimator
    reg = SGDRegressor(alpha=alpha, epsilon=epsilon, eta0=eta0,
                       l1_ratio=l1_ratio, learning_rate=learning_rate,
                       loss=loss, max_iter=max_iter, penalty=penalty,
                       power_t=power_t, tol=tol)

    return {"estimator": reg, "params": tuneParams}
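## Hypothetical sketch (not part of the original module): SGD-based models
## are sensitive to feature scale, so a common scikit-learn pattern is to
## standardize inside a Pipeline. The explicit params=None call assumes
## mergeParams accepts None, as the keyword defaults elsewhere suggest.
def _demoSGDRegressorScaling():
    from sklearn.datasets import make_regression
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler

    X, y = make_regression(n_samples=200, n_features=5, random_state=0)

    reg = createSGDRegressor(None)["estimator"]
    model = make_pipeline(StandardScaler(), reg)
    model.fit(X, y)
    print(model.score(X, y))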
def createSVMLinearRegressor(params=None):
    info("Creating SVM Linear Regressor", ind=4)

    ## Params
    params = mergeParams(LinearSVR(), params)
    tuneParams = getSVMLinearRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    C = setParam('C', params, grid, force=False)
    info("Param: C = {0}".format(C), ind=6)
    loss = setParam('loss', params, grid, force=False)
    info("Param: loss = {0}".format(loss), ind=6)

    ## Estimator
    reg = LinearSVR(C=C, loss=loss)

    return {"estimator": reg, "params": tuneParams}
def createAdaBoostRegressor(params):
    info("Creating AdaBoost Regressor", ind=4)

    ## Params
    params = mergeParams(AdaBoostRegressor(), params)
    tuneParams = getAdaBoostRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    learning_rate = setParam('learning_rate', params, grid)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)
    n_estimators = setParam('n_estimators', params, grid)
    info("Param: n_estimators = {0}".format(n_estimators), ind=6)

    ## Estimator
    reg = AdaBoostRegressor(learning_rate=learning_rate,
                            n_estimators=n_estimators)

    return {"estimator": reg, "params": tuneParams}
def createSVMNuLinearRegressor(params=None):
    info("Creating SVM Nu Linear Regressor", ind=4)

    ## Params
    params = mergeParams(NuSVR(), params)
    kernel = 'linear'
    tuneParams = getSVMNuRegressorParams(kernel)
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    C = setParam('C', params, grid, force=False)
    info("Param: C = {0}".format(C), ind=6)
    nu = setParam('nu', params, grid, force=False)
    info("Param: nu = {0}".format(nu), ind=6)
    info("Param: kernel = {0}".format(kernel), ind=6)

    ## Estimator
    reg = NuSVR(C=C, kernel=kernel, nu=nu)

    return {"estimator": reg, "params": tuneParams}
def createDecisionTreeClassifier(params):
    info("Creating Decision Tree Classifier", ind=4)

    ## Params
    params = mergeParams(DecisionTreeClassifier(), params)
    tuneParams = getDecisionTreeClassifierParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    criterion = setParam('criterion', params, grid)
    info("Param: criterion = {0}".format(criterion), ind=6)
    max_depth = setParam('max_depth', params, grid)
    info("Param: max_depth = {0}".format(max_depth), ind=6)
    max_features = setParam('max_features', params, grid)
    info("Param: max_features = {0}".format(max_features), ind=6)
    max_leaf_nodes = setParam('max_leaf_nodes', params, grid)
    info("Param: max_leaf_nodes = {0}".format(max_leaf_nodes), ind=6)
    min_impurity_decrease = setParam('min_impurity_decrease', params, grid)
    info("Param: min_impurity_decrease = {0}".format(min_impurity_decrease), ind=6)
    min_samples_leaf = setParam('min_samples_leaf', params, grid)
    info("Param: min_samples_leaf = {0}".format(min_samples_leaf), ind=6)
    min_samples_split = setParam('min_samples_split', params, grid)
    info("Param: min_samples_split = {0}".format(min_samples_split), ind=6)
    min_weight_fraction_leaf = setParam('min_weight_fraction_leaf', params, grid)
    info("Param: min_weight_fraction_leaf = {0}".format(min_weight_fraction_leaf), ind=6)

    ## Estimator
    clf = DecisionTreeClassifier(criterion=criterion, max_depth=max_depth,
                                 max_features=max_features,
                                 max_leaf_nodes=max_leaf_nodes,
                                 min_impurity_decrease=min_impurity_decrease,
                                 min_samples_leaf=min_samples_leaf,
                                 min_samples_split=min_samples_split,
                                 min_weight_fraction_leaf=min_weight_fraction_leaf)

    return {"estimator": clf, "params": tuneParams}
def createSVMEpsLinearRegressor(params=None):
    info("Creating SVM Epsilon Linear Regressor", ind=4)

    ## Params
    params = mergeParams(SVR(), params)
    kernel = 'linear'
    tuneParams = getSVMEpsRegressorParams(kernel)
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    C = setParam('C', params, grid, force=False)
    info("Param: C = {0}".format(C), ind=6)
    epsilon = setParam('epsilon', params, grid, force=False)
    info("Param: epsilon = {0}".format(epsilon), ind=6)
    info("Param: kernel = {0}".format(kernel), ind=6)

    ## Estimator
    reg = SVR(C=C, epsilon=epsilon, kernel=kernel)

    return {"estimator": reg, "params": tuneParams}
def createSVMNuPolyClassifier(params=None):
    info("Creating SVM Nu Poly Classifier", ind=4)

    ## Params
    params = mergeParams(NuSVC(), params)
    kernel = 'poly'
    tuneParams = getSVMNuClassifierParams(kernel)
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    nu = setParam('nu', params, grid, force=False)
    info("Param: nu = {0}".format(nu), ind=6)
    info("Param: kernel = {0}".format(kernel), ind=6)
    coef0 = setParam('coef0', params, grid, force=False)
    info("Param: coef0 = {0}".format(coef0), ind=6)
    degree = setParam('degree', params, grid, force=False)
    info("Param: degree = {0}".format(degree), ind=6)
    gamma = setParam('gamma', params, grid, force=False)
    info("Param: gamma = {0}".format(gamma), ind=6)
    probability = True
    info("Param: probability = {0}".format(probability), ind=6)

    ## Estimator
    clf = NuSVC(coef0=coef0, degree=degree, gamma=gamma,
                probability=probability, kernel=kernel, nu=nu)

    return {"estimator": clf, "params": tuneParams}
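## Hypothetical sketch (not part of the original module): probability=True is
## forced above because it makes the fitted SVM expose predict_proba (via
## internal Platt scaling), which estimators rejected elsewhere in this file
## (e.g. the Passive Aggressive classifier) lack. Assumes the tuned nu is
## feasible for the training data, since scikit-learn raises otherwise.
def _demoSVMNuPolyProbabilities():
    from sklearn.datasets import make_classification

    X, y = make_classification(n_samples=150, n_features=6, random_state=0)

    clf = createSVMNuPolyClassifier()["estimator"]
    clf.fit(X, y)
    print(clf.predict_proba(X[:3]))  # rows of class probabilities summing to 1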
def createSVMNuPolyRegressor(params=None):
    info("Creating SVM Nu Poly Regressor", ind=4)

    ## Params
    params = mergeParams(NuSVR(), params)
    kernel = 'poly'
    tuneParams = getSVMNuRegressorParams(kernel)
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    C = setParam('C', params, grid, force=False)
    info("Param: C = {0}".format(C), ind=6)
    nu = setParam('nu', params, grid, force=False)
    info("Param: nu = {0}".format(nu), ind=6)
    info("Param: kernel = {0}".format(kernel), ind=6)
    coef0 = setParam('coef0', params, grid, force=False)
    info("Param: coef0 = {0}".format(coef0), ind=6)
    degree = setParam('degree', params, grid, force=False)
    info("Param: degree = {0}".format(degree), ind=6)
    gamma = setParam('gamma', params, grid, force=False)
    info("Param: gamma = {0}".format(gamma), ind=6)

    ## Estimator
    reg = NuSVR(C=C, coef0=coef0, degree=degree, gamma=gamma,
                kernel=kernel, nu=nu)

    return {"estimator": reg, "params": tuneParams}
def createRandomForestRegressor(params):
    info("Creating Random Forest Regressor", ind=4)

    ## Params
    params = mergeParams(RandomForestRegressor(), params)
    tuneParams = getRandomForestRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    bootstrap = setParam('bootstrap', params, grid)
    info("Param: bootstrap = {0}".format(bootstrap), ind=6)
    criterion = setParam('criterion', params, grid)
    info("Param: criterion = {0}".format(criterion), ind=6)
    max_depth = setParam('max_depth', params, grid)
    info("Param: max_depth = {0}".format(max_depth), ind=6)
    max_features = setParam('max_features', params, grid)
    info("Param: max_features = {0}".format(max_features), ind=6)
    min_impurity_decrease = setParam('min_impurity_decrease', params, grid)
    info("Param: min_impurity_decrease = {0}".format(min_impurity_decrease), ind=6)
    min_samples_leaf = setParam('min_samples_leaf', params, grid)
    info("Param: min_samples_leaf = {0}".format(min_samples_leaf), ind=6)
    n_estimators = setParam('n_estimators', params, grid)
    info("Param: n_estimators = {0}".format(n_estimators), ind=6)
    n_jobs = setParam('n_jobs', params, grid)
    n_jobs = -1  ## Override: the tuned value is discarded; always use all cores
    info("Param: n_jobs = {0}".format(n_jobs), ind=6)

    ## Estimator
    reg = RandomForestRegressor(bootstrap=bootstrap, criterion=criterion,
                                max_depth=max_depth, max_features=max_features,
                                min_impurity_decrease=min_impurity_decrease,
                                min_samples_leaf=min_samples_leaf,
                                n_estimators=n_estimators, n_jobs=n_jobs)

    return {"estimator": reg, "params": tuneParams}
def createGBMRegressor(params):
    info("Creating GBM Regressor", ind=4)

    ## Params
    params = mergeParams(GradientBoostingRegressor(), params)
    tuneParams = getGradientBoostingRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    criterion = setParam('criterion', params, grid)
    info("Param: criterion = {0}".format(criterion), ind=6)
    learning_rate = setParam('learning_rate', params, grid)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)
    loss = setParam('loss', params, grid)
    info("Param: loss = {0}".format(loss), ind=6)
    max_depth = setParam('max_depth', params, grid)
    info("Param: max_depth = {0}".format(max_depth), ind=6)
    max_features = setParam('max_features', params, grid)
    info("Param: max_features = {0}".format(max_features), ind=6)
    min_impurity_decrease = setParam('min_impurity_decrease', params, grid)
    info("Param: min_impurity_decrease = {0}".format(min_impurity_decrease), ind=6)
    min_samples_leaf = setParam('min_samples_leaf', params, grid)
    info("Param: min_samples_leaf = {0}".format(min_samples_leaf), ind=6)
    n_estimators = setParam('n_estimators', params, grid)
    info("Param: n_estimators = {0}".format(n_estimators), ind=6)

    ## Estimator
    reg = GradientBoostingRegressor(criterion=criterion,
                                    learning_rate=learning_rate, loss=loss,
                                    max_depth=max_depth,
                                    max_features=max_features,
                                    min_impurity_decrease=min_impurity_decrease,
                                    min_samples_leaf=min_samples_leaf,
                                    n_estimators=n_estimators)

    return {"estimator": reg, "params": tuneParams}
def createBernoulliNaiveBayesClassifier(params):
    info("Creating Bernoulli Naive Bayes Classifier", ind=4)

    ## Params
    params = mergeParams(BernoulliNB(), params)
    tuneParams = getBernoulliNaiveBayesClassifierParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)

    ## Estimator
    clf = BernoulliNB(alpha=alpha)

    return {"estimator": clf, "params": tuneParams}
def createQDAClassifier(params=None):
    info("Creating QDA Classifier", ind=4)

    ## Params
    params = mergeParams(QuadraticDiscriminantAnalysis(), params)
    tuneParams = getQuadraticDiscriminantAnalysisParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    reg_param = setParam('reg_param', params, grid)
    info("Param: reg_param = {0}".format(reg_param), ind=6)

    ## Estimator
    clf = QuadraticDiscriminantAnalysis(reg_param=reg_param)

    return {"estimator": clf, "params": tuneParams}
def createMultinomialNaiveBayesClassifier(params):
    info("Creating Multinomial Naive Bayes Classifier", ind=4)
    error("Multinomial Naive Bayes Classifier does not work", ind=4)
    return {"estimator": None, "params": None}

    ## NOTE: the code below is unreachable; it is retained so the estimator
    ## can be re-enabled once the underlying issue is resolved.
    ## Params
    params = mergeParams(MultinomialNB(), params)
    tuneParams = getMultinomialNaiveBayesClassifierParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)

    ## Estimator
    clf = MultinomialNB(alpha=alpha)

    return {"estimator": clf, "params": tuneParams}
def createLogisticRegressionClassifier(params=None):
    info("Creating Logistic Regression Classifier", ind=4)

    ## Params
    params = mergeParams(LogisticRegression(), params)
    params = mergeParams(LogisticRegressionCV(), params)

    ## Estimator
    if params.get('cv'):
        info("Using Built-In Cross Validation With Parameters", ind=4)
        tuneParams = getLogisticRegressionClassifer(cv=True)
        grid = tuneParams['grid']
        Cs = setParam('Cs', params, grid, force=True)
        info("Param: Cs = {0}".format(Cs), ind=6)
        penalty = setParam('penalty', params, grid, force=True)
        info("Param: penalty = {0}".format(penalty), ind=6)
        solver = setParam('solver', params, grid, force=False)
        info("Param: solver = {0}".format(solver), ind=6)
        #n_jobs = -1
        #info("Param: n_jobs = {0}".format(n_jobs), ind=6)
        clf = LogisticRegressionCV(Cs=Cs, penalty=penalty, solver=solver)
    else:
        info("With Parameters", ind=4)
        tuneParams = getLogisticRegressionClassifer(cv=False)
        grid = tuneParams['grid']
        C = setParam('C', params, grid, force=False)
        info("Param: C = {0}".format(C), ind=6)
        penalty = setParam('penalty', params, grid, force=False)
        info("Param: penalty = {0}".format(penalty), ind=6)
        solver = setParam('solver', params, grid, force=False)
        info("Param: solver = {0}".format(solver), ind=6)
        #n_jobs = -1
        #info("Param: n_jobs = {0}".format(n_jobs), ind=6)
        clf = LogisticRegression(C=C, penalty=penalty, solver=solver)

    return {"estimator": clf, "params": tuneParams}
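## Illustration of the cv branch above (not part of the original module):
## LogisticRegressionCV folds the search over C into a single fit, whereas
## plain LogisticRegression leaves C to an outer tuner such as GridSearchCV.
## Data and parameter values here are illustrative only.
def _demoLogisticRegressionCVBranch():
    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression, LogisticRegressionCV

    X, y = make_classification(n_samples=300, n_features=8, random_state=0)

    cv_clf = LogisticRegressionCV(Cs=10, penalty='l2', solver='lbfgs', cv=5)
    cv_clf.fit(X, y)
    print("built-in CV picked C =", cv_clf.C_[0])

    plain = LogisticRegression(C=1.0, penalty='l2', solver='lbfgs')
    plain.fit(X, y)  # here C would normally come from an outer GridSearchCV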
def createXgboostRegressor(params=None):
    info("Creating Xgboost Regressor", ind=4)

    ## Params
    params = mergeParams(XGBRegressor(), params)
    tuneParams = getXGBRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    gamma = setParam('gamma', params, grid)
    info("Param: gamma = {0}".format(gamma), ind=6)
    max_depth = setParam('max_depth', params, grid)
    info("Param: max_depth = {0}".format(max_depth), ind=6)
    learning_rate = setParam('learning_rate', params, grid)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)
    n_estimators = setParam('n_estimators', params, grid)
    info("Param: n_estimators = {0}".format(n_estimators), ind=6)
    nthread = setParam('nthread', params, grid)
    info("Param: nthread = {0}".format(nthread), ind=6)
    reg_alpha = setParam('reg_alpha', params, grid)
    info("Param: reg_alpha = {0}".format(reg_alpha), ind=6)
    reg_lambda = setParam('reg_lambda', params, grid)
    info("Param: reg_lambda = {0}".format(reg_lambda), ind=6)

    ## Estimator
    reg = XGBRegressor(gamma=gamma, learning_rate=learning_rate,
                       max_depth=max_depth, n_estimators=n_estimators,
                       nthread=nthread, reg_alpha=reg_alpha,
                       reg_lambda=reg_lambda)

    return {"estimator": reg, "params": tuneParams}
def createMLPRegressor(params=None):
    info("Creating MLP Regressor", ind=4)

    ## Params
    params = mergeParams(MLPRegressor(), params)
    tuneParams = getMLPRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    activation = setParam('activation', params, grid, force=False)
    info("Param: activation = {0}".format(activation), ind=6)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)
    beta_1 = setParam('beta_1', params, grid, force=False)
    info("Param: beta_1 = {0}".format(beta_1), ind=6)
    beta_2 = setParam('beta_2', params, grid, force=False)
    info("Param: beta_2 = {0}".format(beta_2), ind=6)
    hidden_layer_sizes = setParam('hidden_layer_sizes', params, grid, force=False)
    info("Param: hidden_layer_sizes = {0}".format(hidden_layer_sizes), ind=6)
    learning_rate = setParam('learning_rate', params, grid, force=False)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)
    max_iter = setParam('max_iter', params, grid, force=False)
    info("Param: max_iter = {0}".format(max_iter), ind=6)
    momentum = setParam('momentum', params, grid, force=False)
    info("Param: momentum = {0}".format(momentum), ind=6)
    power_t = setParam('power_t', params, grid, force=False)
    info("Param: power_t = {0}".format(power_t), ind=6)
    solver = setParam('solver', params, grid, force=False)
    info("Param: solver = {0}".format(solver), ind=6)

    ## Estimator
    reg = MLPRegressor(activation=activation, alpha=alpha, beta_1=beta_1,
                       beta_2=beta_2, hidden_layer_sizes=hidden_layer_sizes,
                       learning_rate=learning_rate, max_iter=max_iter,
                       momentum=momentum, power_t=power_t, solver=solver)

    return {"estimator": reg, "params": tuneParams}
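## Hypothetical sketch (not part of the original module): hidden_layer_sizes
## is a tuple with one entry per hidden layer, so a tuning grid over network
## architectures is a list of tuples. getMLPRegressorParams() presumably
## supplies something similar; its actual contents are not shown in this file.
def _demoMLPHiddenLayerGrid():
    from sklearn.datasets import make_regression
    from sklearn.model_selection import GridSearchCV
    from sklearn.neural_network import MLPRegressor

    X, y = make_regression(n_samples=200, n_features=5, noise=0.1,
                           random_state=0)

    grid = {'hidden_layer_sizes': [(25,), (50,), (25, 25)]}
    search = GridSearchCV(MLPRegressor(max_iter=500, random_state=0),
                          grid, cv=3)
    search.fit(X, y)
    print(search.best_params_)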