Ejemplo n.º 1
0
def createRidgeRegressor(params=None):
    """Build a Ridge regressor, or RidgeCV when built-in CV is requested.

    The caller-supplied params are merged with the defaults of both
    estimator flavors; the 'cv' key selects which one is constructed.
    Returns {"estimator": <configured estimator>, "params": <tuning info>}.
    """
    info("Creating Ridge Regressor", ind=4)

    ## Merge user params against both estimator flavors' defaults
    params = mergeParams(RidgeCV(), params)
    params = mergeParams(Ridge(), params)

    use_cv = params.get('cv') is True
    if use_cv:
        info("Using Built-In Cross Validation With Parameters", ind=4)
        tuneParams = getRidgeRegressorParams(cv=True)
        search_grid = tuneParams['grid']

        alpha_grid = setParam('alphas', params, search_grid, force=True)
        info("Param: alphas = {0}".format(alpha_grid), ind=6)

        estimator = RidgeCV(alphas=alpha_grid)
    else:
        info("With Parameters", ind=4)
        tuneParams = getRidgeRegressorParams(cv=False)
        search_grid = tuneParams['grid']

        alpha_value = setParam('alpha', params, search_grid, force=False)
        info("Param: alpha = {0}".format(alpha_value), ind=6)

        estimator = Ridge(alpha=alpha_value)

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 2
0
def createElasticNetRegressor(params=None):
    """Build an ElasticNet regressor (ElasticNetCV when 'cv' is True).

    Returns {"estimator": <configured estimator>, "params": <tuning info>}.
    """
    info("Creating ElasticNet Regressor", ind=4)

    ## Merge user params against both estimator flavors' defaults
    params = mergeParams(ElasticNetCV(), params)
    params = mergeParams(ElasticNet(), params)

    use_cv = params.get('cv') is True
    if use_cv:
        info("Using Built-In Cross Validation With Parameters", ind=4)
        tuneParams = getElasticNetRegressorParams(cv=True)
        search_grid = tuneParams['grid']

        alpha_grid = setParam('alphas', params, search_grid, force=True)
        info("Param: alphas = {0}".format(alpha_grid), ind=6)

        mix_ratio = setParam('l1_ratio', params, search_grid, force=True)
        info("Param: l1_ratio = {0}".format(mix_ratio), ind=6)

        estimator = ElasticNetCV(alphas=alpha_grid, l1_ratio=mix_ratio)
    else:
        info("With Parameters", ind=4)
        tuneParams = getElasticNetRegressorParams(cv=False)
        search_grid = tuneParams['grid']

        alpha_value = setParam('alpha', params, search_grid, force=False)
        info("Param: alpha = {0}".format(alpha_value), ind=6)

        mix_ratio = setParam('l1_ratio', params, search_grid, force=False)
        info("Param: l1_ratio = {0}".format(mix_ratio), ind=6)

        estimator = ElasticNet(alpha=alpha_value, l1_ratio=mix_ratio)

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 3
0
def createLogisticRegressionClassifier(params=None):
    """Build a LogisticRegression classifier, or LogisticRegressionCV when
    the merged params request built-in cross validation (truthy 'cv').

    Returns {"estimator": <classifier>, "params": <tuning grid info>}.
    """
    info("Creating Logistic Regression Classifier", ind=4)

    ## Params
    params = mergeParams(LogisticRegression(), params)
    params = mergeParams(LogisticRegressionCV(), params)
    # Fix: a cv-agnostic getLogisticRegressionClassifer() call used to run
    # here; it was redundant since both branches below immediately overwrite
    # tuneParams/grid with the cv-specific result.

    ## Estimator
    if params.get('cv'):
        info("Using Built-In Cross Validation With Parameters", ind=4)
        tuneParams = getLogisticRegressionClassifer(cv=True)
        grid = tuneParams['grid']

        Cs = setParam('Cs', params, grid, force=True)
        info("Param: Cs = {0}".format(Cs), ind=6)

        penalty = setParam('penalty', params, grid, force=True)
        info("Param: penalty = {0}".format(penalty), ind=6)

        solver = setParam('solver', params, grid, force=False)
        info("Param: solver = {0}".format(solver), ind=6)

        clf = LogisticRegressionCV(Cs=Cs, penalty=penalty, solver=solver)
    else:
        info("With Parameters", ind=4)
        tuneParams = getLogisticRegressionClassifer(cv=False)
        grid = tuneParams['grid']

        C = setParam('C', params, grid, force=False)
        info("Param: C = {0}".format(C), ind=6)

        penalty = setParam('penalty', params, grid, force=False)
        info("Param: penalty = {0}".format(penalty), ind=6)

        solver = setParam('solver', params, grid, force=False)
        info("Param: solver = {0}".format(solver), ind=6)

        clf = LogisticRegression(C=C, penalty=penalty, solver=solver)

    return {"estimator": clf, "params": tuneParams}
Ejemplo n.º 4
0
def createPassiveAggressiveClassifier(params):
    """Intentionally disabled factory.

    PassiveAggressiveClassifier exposes no predict_proba, and this framework
    requires probability estimates, so the model is reported as unavailable.

    Returns {"estimator": None, "params": None} so callers skip this model.
    """
    info("Creating Passive Aggressive Classifier", ind=4)
    error("Does not give probabilities.")
    return {"estimator": None, "params": None}
    # Fix: the former parameter-resolution and estimator-construction code
    # that followed this return was unreachable dead code and was removed.
Ejemplo n.º 5
0
Archivo: svm.py Proyecto: tgadf/pymva
def createSVMEpsRbfClassifier(params=None):
    """Build an RBF-kernel epsilon-SVM classifier (sklearn SVC).

    probability is forced to True so the classifier can emit probabilities.
    Returns {"estimator": <SVC>, "params": <tuning grid info>}.
    """
    info("Creating SVM Epsilon Rbf Classifier", ind=4)

    ## Params
    kernel = 'rbf'
    params = mergeParams(SVC(), params)
    tuneParams = getSVMEpsClassifierParams(kernel)
    search_grid = tuneParams['grid']

    info("With Parameters", ind=4)
    cost = setParam('C', params, search_grid, force=False)
    info("Param: C = {0}".format(cost), ind=6)

    info("Param: kernel = {0}".format(kernel), ind=6)

    rbf_gamma = setParam('gamma', params, search_grid, force=False)
    info("Param: gamma = {0}".format(rbf_gamma), ind=6)

    probability = True
    info("Param: probability = {0}".format(probability), ind=6)

    ## estimator
    estimator = SVC(C=cost, probability=probability,
                    gamma=rbf_gamma, kernel=kernel)

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 6
0
Archivo: svm.py Proyecto: tgadf/pymva
def createSVMNuRbfClassifier(params=None):
    """Build an RBF-kernel nu-SVM classifier (sklearn NuSVC).

    probability is forced to True so the classifier can emit probabilities.
    Returns {"estimator": <NuSVC>, "params": <tuning grid info>}.
    """
    info("Creating SVM Nu Rbf Classifier", ind=4)

    ## Params
    kernel = 'rbf'
    params = mergeParams(NuSVC(), params)
    tuneParams = getSVMNuClassifierParams(kernel)
    search_grid = tuneParams['grid']

    info("With Parameters", ind=4)
    nu_value = setParam('nu', params, search_grid, force=False)
    info("Param: nu = {0}".format(nu_value), ind=6)

    info("Param: kernel = {0}".format(kernel), ind=6)

    rbf_gamma = setParam('gamma', params, search_grid, force=False)
    info("Param: gamma = {0}".format(rbf_gamma), ind=6)

    probability = True
    info("Param: probability = {0}".format(probability), ind=6)

    ## estimator
    estimator = NuSVC(kernel=kernel, nu=nu_value,
                      probability=probability, gamma=rbf_gamma)

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 7
0
Archivo: svm.py Proyecto: tgadf/pymva
def createKernelRidgeRegressor(params=None):
    """Build a KernelRidge regressor from merged parameters.

    Resolves alpha/coef0/degree/kernel against the tuning grid and returns
    {"estimator": <KernelRidge>, "params": <tuning grid info>}.
    """
    # Fix: log message said "Creating SVM Regressor" — a copy-paste from the
    # SVM factories in this module; this builds a KernelRidge estimator.
    info("Creating Kernel Ridge Regressor", ind=4)

    ## Params
    params = mergeParams(KernelRidge(), params)
    tuneParams = getKernelRidgeRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)

    coef0 = setParam('coef0', params, grid, force=False)
    info("Param: coef0 = {0}".format(coef0), ind=6)

    degree = setParam('degree', params, grid, force=False)
    info("Param: degree = {0}".format(degree), ind=6)

    kernel = setParam('kernel', params, grid, force=False)
    info("Param: kernel = {0}".format(kernel), ind=6)

    ## estimator
    reg = KernelRidge(alpha=alpha, coef0=coef0, degree=degree, kernel=kernel)

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 8
0
def createARDRegressor(params=None):
    """Build an ARDRegression estimator from merged parameters.

    Resolves the two alpha/lambda prior pairs against the tuning grid and
    returns {"estimator": <ARDRegression>, "params": <tuning grid info>}.
    """
    info("Creating ARD Regressor", ind=4)

    ## Params
    params = mergeParams(ARDRegression(), params)
    tuneParams = getARDRegressorParams()
    search_grid = tuneParams['grid']

    info("With Parameters", ind=4)
    a1 = setParam('alpha_1', params, search_grid, force=False)
    info("Param: alpha_1 = {0}".format(a1), ind=6)

    l1 = setParam('lambda_1', params, search_grid, force=False)
    info("Param: lambda_1 = {0}".format(l1), ind=6)

    a2 = setParam('alpha_2', params, search_grid, force=False)
    info("Param: alpha_2 = {0}".format(a2), ind=6)

    l2 = setParam('lambda_2', params, search_grid, force=False)
    info("Param: lambda_2 = {0}".format(l2), ind=6)

    ## estimator
    estimator = ARDRegression(alpha_1=a1, alpha_2=a2,
                              lambda_1=l1, lambda_2=l2)

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 9
0
def createHuberRegressor(params=None):
    """Build a HuberRegressor from merged parameters.

    Fix: `params` now defaults to None (consistent with the other factory
    functions in this module; mergeParams already handles None) — a
    backward-compatible change for existing positional callers.

    Returns {"estimator": <HuberRegressor>, "params": <tuning grid info>}.
    """
    info("Creating Huber Regressor", ind=4)

    ## Params
    params = mergeParams(HuberRegressor(), params)
    tuneParams = getHuberRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)

    epsilon = setParam('epsilon', params, grid, force=False)
    info("Param: epsilon = {0}".format(epsilon), ind=6)

    max_iter = setParam('max_iter', params, grid, force=False)
    info("Param: max_iter = {0}".format(max_iter), ind=6)

    tol = setParam('tol', params, grid, force=False)
    info("Param: tol = {0}".format(tol), ind=6)

    # estimator
    reg = HuberRegressor(alpha=alpha,
                         epsilon=epsilon,
                         max_iter=max_iter,
                         tol=tol)

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 10
0
Archivo: svm.py Proyecto: tgadf/pymva
def createSVMNuRbfRegressor(params=None):
    """Build an RBF-kernel nu-SVM regressor (sklearn NuSVR).

    Returns {"estimator": <NuSVR>, "params": <tuning grid info>}.
    """
    info("Creating SVM Nu Rbf Regressor", ind=4)

    ## Params
    kernel = 'rbf'
    params = mergeParams(NuSVR(), params)
    tuneParams = getSVMNuRegressorParams(kernel)
    search_grid = tuneParams['grid']

    info("With Parameters", ind=4)
    cost = setParam('C', params, search_grid, force=False)
    info("Param: C = {0}".format(cost), ind=6)

    nu_value = setParam('nu', params, search_grid, force=False)
    info("Param: nu = {0}".format(nu_value), ind=6)

    info("Param: kernel = {0}".format(kernel), ind=6)

    rbf_gamma = setParam('gamma', params, search_grid, force=False)
    info("Param: gamma = {0}".format(rbf_gamma), ind=6)

    ## estimator
    estimator = NuSVR(C=cost, kernel=kernel, nu=nu_value, gamma=rbf_gamma)

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 11
0
def createRadiusNeighborsRegressor(params=None):
    """Intentionally disabled factory.

    The original author flagged this estimator as non-functional in this
    framework, so it is reported as unavailable.

    Returns {"estimator": None, "params": None} so callers skip this model.
    """
    info("Creating Radius Neighbors Regressor", ind=4)
    error("This doesn't work")
    return {"estimator": None, "params": None}
    # Fix: the former parameter-resolution and estimator-construction code
    # that followed this return was unreachable dead code and was removed.
Ejemplo n.º 12
0
def createPassiveAggressiveRegressor(params=None):
    """Build a PassiveAggressiveRegressor from merged parameters.

    Fix: `params` now defaults to None (consistent with the other factory
    functions in this module; mergeParams already handles None) — a
    backward-compatible change for existing positional callers.

    Returns {"estimator": <PassiveAggressiveRegressor>, "params": <grid info>}.
    """
    info("Creating Passive Aggressive Regressor", ind=4)

    ## Params
    params = mergeParams(PassiveAggressiveRegressor(), params)
    tuneParams = getPassiveAggressiveRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    C = setParam('C', params, grid, force=False)
    info("Param: C = {0}".format(C), ind=6)

    loss = setParam('loss', params, grid, force=False)
    info("Param: loss = {0}".format(loss), ind=6)

    max_iter = setParam('max_iter', params, grid, force=False)
    info("Param: max_iter = {0}".format(max_iter), ind=6)

    tol = setParam('tol', params, grid, force=False)
    info("Param: tol = {0}".format(tol), ind=6)

    ## Estimator
    reg = PassiveAggressiveRegressor(C=C,
                                     loss=loss,
                                     max_iter=max_iter,
                                     tol=tol)

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 13
0
Archivo: svm.py Proyecto: tgadf/pymva
def createSVMEpsRbfRegressor(params=None):
    """Build an RBF-kernel epsilon-SVM regressor (sklearn SVR).

    Returns {"estimator": <SVR>, "params": <tuning grid info>}.
    """
    info("Creating SVM Epsilon Rbf Regressor", ind=4)

    ## Params
    kernel = 'rbf'
    params = mergeParams(SVR(), params)
    tuneParams = getSVMEpsRegressorParams(kernel)
    search_grid = tuneParams['grid']

    info("With Parameters", ind=4)
    cost = setParam('C', params, search_grid, force=False)
    info("Param: C = {0}".format(cost), ind=6)

    eps = setParam('epsilon', params, search_grid, force=False)
    info("Param: epsilon = {0}".format(eps), ind=6)

    info("Param: kernel = {0}".format(kernel), ind=6)

    rbf_gamma = setParam('gamma', params, search_grid, force=False)
    info("Param: gamma = {0}".format(rbf_gamma), ind=6)

    ## estimator
    estimator = SVR(C=cost, epsilon=eps, gamma=rbf_gamma, kernel=kernel)

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 14
0
def createLDAClassifier(params=None):
    """Build a LinearDiscriminantAnalysis classifier.

    Resolves n_components/solver/shrinkage against the tuning grid and
    returns {"estimator": <LDA>, "params": <tuning grid info>}.
    """
    info("Creating LDA Classifier", ind=4)

    ## Params
    merged = mergeParams(LinearDiscriminantAnalysis(), params)
    tuneParams = getLinearDiscriminantAnalysisParams()
    search_grid = tuneParams['grid']

    info("With Parameters", ind=6)
    n_components = setParam('n_components', merged, search_grid)
    info("Param: n_components = {0}".format(n_components), ind=6)

    solver = setParam('solver', merged, search_grid)
    info("Param: solver = {0}".format(solver), ind=6)

    shrinkage = setParam('shrinkage', merged, search_grid)
    info("Param: shrinkage = {0}".format(shrinkage), ind=6)

    ## Estimator
    clf = LinearDiscriminantAnalysis(n_components=n_components,
                                     solver=solver,
                                     shrinkage=shrinkage)

    return {"estimator": clf, "params": tuneParams}
Ejemplo n.º 15
0
def createOrthogonalMatchingPursuitRegressor(params=None):
    """Build an OrthogonalMatchingPursuit regressor.

    The CV variant is used when the merged params set 'cv' to True; neither
    variant receives explicit hyper-parameters here.
    Returns {"estimator": <estimator>, "params": <tuning info>}.
    """
    info("Creating Orthogonal Matching Pursuit Regressor", ind=4)

    ## Params
    params = mergeParams(OrthogonalMatchingPursuit(), params)
    params = mergeParams(OrthogonalMatchingPursuitCV(), params)
    tuneParams = getOrthogonalMatchingPursuitRegressorParams()

    ## estimator
    use_cv = params.get('cv') is True
    if use_cv:
        info("Using Built-In Cross Validation With Parameters", ind=4)
        estimator = OrthogonalMatchingPursuitCV()
    else:
        info("Without Parameters", ind=4)
        estimator = OrthogonalMatchingPursuit()

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 16
0
Archivo: nn.py Proyecto: tgadf/pymva
def createMLPRegressor(params=None):
    """Build an MLPRegressor from merged parameters.

    Fix: the original resolved and logged 'alpha' twice in a row; the
    duplicate setParam/info pair was removed (same value both times, so
    behavior is unchanged apart from the repeated log line).

    Returns {"estimator": <MLPRegressor>, "params": <tuning grid info>}.
    """
    info("Creating MLP Regressor", ind=4)

    ## Params
    params = mergeParams(MLPRegressor(), params)
    tuneParams = getMLPRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    activation = setParam('activation', params, grid, force=False)
    info("Param: activation = {0}".format(activation), ind=6)

    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)

    beta_1 = setParam('beta_1', params, grid, force=False)
    info("Param: beta_1 = {0}".format(beta_1), ind=6)

    beta_2 = setParam('beta_2', params, grid, force=False)
    info("Param: beta_2 = {0}".format(beta_2), ind=6)

    hidden_layer_sizes = setParam('hidden_layer_sizes',
                                  params,
                                  grid,
                                  force=False)
    info("Param: hidden_layer_sizes = {0}".format(hidden_layer_sizes), ind=6)

    learning_rate = setParam('learning_rate', params, grid, force=False)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)

    max_iter = setParam('max_iter', params, grid, force=False)
    info("Param: max_iter = {0}".format(max_iter), ind=6)

    momentum = setParam('momentum', params, grid, force=False)
    info("Param: momentum = {0}".format(momentum), ind=6)

    power_t = setParam('power_t', params, grid, force=False)
    info("Param: power_t = {0}".format(power_t), ind=6)

    solver = setParam('solver', params, grid, force=False)
    info("Param: solver = {0}".format(solver), ind=6)

    reg = MLPRegressor(activation=activation,
                       alpha=alpha,
                       beta_1=beta_1,
                       beta_2=beta_2,
                       hidden_layer_sizes=hidden_layer_sizes,
                       learning_rate=learning_rate,
                       max_iter=max_iter,
                       momentum=momentum,
                       power_t=power_t,
                       solver=solver)

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 17
0
def createIsotonicRegressor(params=None):
    """Build an IsotonicRegression estimator.

    Only increasing="auto" is set explicitly; the merged params are not
    otherwise used. Returns {"estimator": ..., "params": <tuning info>}.
    """
    info("Creating Isotonic Regressor", ind=4)

    ## Params (merged for consistency with the other factories)
    params = mergeParams(IsotonicRegression(), params)
    tuneParams = getIsotonicRegressionParams()

    info("Without Parameters", ind=4)
    estimator = IsotonicRegression(increasing="auto")

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 18
0
def createRANSACRegressor(params):
    """Build a RANSACRegressor with default settings.

    The merged params are not used beyond normalization; the estimator is
    constructed with sklearn defaults.
    Returns {"estimator": <RANSACRegressor>, "params": <tuning info>}.
    """
    # Fix: log message said "Creating TheilSen Regressor" — a copy-paste
    # error; this factory builds a RANSAC regressor.
    info("Creating RANSAC Regressor", ind=4)

    ## Params
    params = mergeParams(RANSACRegressor(), params)
    tuneParams = getRANSACRegressorParams()
    info("Without Parameters", ind=4)

    ## estimator
    reg = RANSACRegressor()

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 19
0
def createLinearRegressor(params=None):
    """Build a plain LinearRegression estimator (sklearn defaults).

    Returns {"estimator": <LinearRegression>, "params": <tuning info>}.
    """
    info("Creating Linear Regressor", ind=4)

    ## Params (merged for consistency; OLS takes no tunables here)
    params = mergeParams(LinearRegression(), params)
    tuneParams = getLinearRegressorParams()
    info("Without Parameters", ind=4)

    ## estimator
    estimator = LinearRegression()

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 20
0
def createEARTHRegressor(params=None):
    """Build an Earth (MARS) regressor with default settings.

    Returns {"estimator": <Earth>, "params": <tuning info>}.
    """
    info("Creating EARTH Regressor", ind=4)

    ## Params (merged for consistency; no tunables are applied here)
    params = mergeParams(Earth(), params)
    tuneParams = getEarthParams()

    info("Without Parameters", ind=4)

    # Estimator
    estimator = Earth()

    return {"estimator": estimator, "params": tuneParams}
Ejemplo n.º 21
0
def createGaussianNaiveBayesClassifier(params=None):
    """Build a GaussianNB classifier with default settings.

    Fix: `params` now defaults to None (consistent with the other factory
    functions in this module; mergeParams already handles None) — a
    backward-compatible change for existing positional callers.

    Returns {"estimator": <GaussianNB>, "params": <tuning info>}.
    """
    info("Creating Gaussian Naive Bayes Classifier", ind=4)

    ## Params
    params = mergeParams(GaussianNB(), params)
    tuneParams = getGaussianNaiveBayesClassifierParams()

    info("Without Parameters", ind=4)

    ## Estimator
    clf = GaussianNB()

    return {"estimator": clf, "params": tuneParams}
Ejemplo n.º 22
0
def createSGDRegressor(params=None):
    """Build an SGDRegressor from merged parameters.

    Fixes:
      * max_iter and tol were resolved and logged but never passed to the
        estimator — they are now forwarded to SGDRegressor.
      * `params` now defaults to None, consistent with the other factories
        (backward-compatible for positional callers).

    Returns {"estimator": <SGDRegressor>, "params": <tuning grid info>}.
    """
    info("Creating SGD Regressor", ind=4)

    ## Params
    params = mergeParams(SGDRegressor(), params)
    tuneParams = getSGDRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)

    epsilon = setParam('epsilon', params, grid, force=False)
    info("Param: epsilon = {0}".format(epsilon), ind=6)

    eta0 = setParam('eta0', params, grid, force=False)
    info("Param: eta0 = {0}".format(eta0), ind=6)

    l1_ratio = setParam('l1_ratio', params, grid, force=False)
    info("Param: l1_ratio = {0}".format(l1_ratio), ind=6)

    learning_rate = setParam('learning_rate', params, grid, force=False)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)

    loss = setParam('loss', params, grid, force=False)
    info("Param: loss = {0}".format(loss), ind=6)

    max_iter = setParam('max_iter', params, grid, force=False)
    info("Param: max_iter = {0}".format(max_iter), ind=6)

    penalty = setParam('penalty', params, grid, force=False)
    info("Param: penalty = {0}".format(penalty), ind=6)

    power_t = setParam('power_t', params, grid, force=False)
    info("Param: power_t = {0}".format(power_t), ind=6)

    tol = setParam('tol', params, grid, force=False)
    info("Param: tol = {0}".format(tol), ind=6)

    ## Estimator
    reg = SGDRegressor(alpha=alpha,
                       epsilon=epsilon,
                       eta0=eta0,
                       l1_ratio=l1_ratio,
                       learning_rate=learning_rate,
                       loss=loss,
                       max_iter=max_iter,
                       penalty=penalty,
                       power_t=power_t,
                       tol=tol)

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 23
0
Archivo: tree.py Proyecto: tgadf/pymva
def createDecisionTreeClassifier(params=None):
    """Build a DecisionTreeClassifier from merged parameters.

    Fix: `params` now defaults to None (consistent with the other factory
    functions in this module; mergeParams already handles None) — a
    backward-compatible change for existing positional callers. The local
    estimator variable is also named `clf` since this is a classifier.

    Returns {"estimator": <DecisionTreeClassifier>, "params": <grid info>}.
    """
    info("Creating Decision Tree Classifier", ind=4)

    ## Params
    params = mergeParams(DecisionTreeClassifier(), params)
    tuneParams = getDecisionTreeClassifierParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    criterion = setParam('criterion', params, grid)
    info("Param: criterion = {0}".format(criterion), ind=6)

    max_depth = setParam('max_depth', params, grid)
    info("Param: max_depth = {0}".format(max_depth), ind=6)

    max_features = setParam('max_features', params, grid)
    info("Param: max_features = {0}".format(max_features), ind=6)

    max_leaf_nodes = setParam('max_leaf_nodes', params, grid)
    info("Param: max_leaf_nodes = {0}".format(max_leaf_nodes), ind=6)

    min_impurity_decrease = setParam('min_impurity_decrease', params, grid)
    info("Param: min_impurity_decrease = {0}".format(min_impurity_decrease),
         ind=6)

    min_samples_leaf = setParam('min_samples_leaf', params, grid)
    info("Param: min_samples_leaf = {0}".format(min_samples_leaf), ind=6)

    min_samples_split = setParam('min_samples_split', params, grid)
    info("Param: min_samples_split = {0}".format(min_samples_split), ind=6)

    min_weight_fraction_leaf = setParam('min_weight_fraction_leaf', params,
                                        grid)
    info("Param: min_weight_fraction_leaf = {0}".format(
        min_weight_fraction_leaf),
         ind=6)

    ## Estimator
    clf = DecisionTreeClassifier(
        criterion=criterion,
        max_depth=max_depth,
        max_features=max_features,
        max_leaf_nodes=max_leaf_nodes,
        min_impurity_decrease=min_impurity_decrease,
        min_samples_leaf=min_samples_leaf,
        min_samples_split=min_samples_split,
        min_weight_fraction_leaf=min_weight_fraction_leaf)

    return {"estimator": clf, "params": tuneParams}
Ejemplo n.º 24
0
def createGaussianProcessClassifier(params=None):
    """Intentionally disabled factory.

    The original author flagged this estimator as prohibitively slow, so it
    is reported as unavailable.

    Returns {"estimator": None, "params": None} so callers skip this model.
    """
    info("Creating Gaussian Process Classifier", ind=4)
    error("This takes forever. Don't use it")
    return {"estimator": None, "params": None}
    # Fix: the former estimator-construction code that followed this return
    # was unreachable dead code and was removed.
Ejemplo n.º 25
0
def createBernoulliNaiveBayesClassifier(params=None):
    """Build a BernoulliNB classifier from merged parameters.

    Fix: `params` now defaults to None (consistent with the other factory
    functions in this module; mergeParams already handles None) — a
    backward-compatible change for existing positional callers.

    Returns {"estimator": <BernoulliNB>, "params": <tuning grid info>}.
    """
    info("Creating Bernoulli Naive Bayes Classifier", ind=4)

    ## Params
    params = mergeParams(BernoulliNB(), params)
    tuneParams = getBernoulliNaiveBayesClassifierParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=4)
    alpha = setParam('alpha', params, grid, force=False)
    info("Param: alpha = {0}".format(alpha), ind=6)

    ## Estimator
    clf = BernoulliNB(alpha=alpha)

    return {"estimator": clf, "params": tuneParams}
Ejemplo n.º 26
0
def createGBMRegressor(params=None):
    """Build a GradientBoostingRegressor from merged parameters.

    Fixes:
      * `params` now defaults to None, consistent with the other factories
        (backward-compatible for positional callers).
      * log-message typo "n_estimator" corrected to "n_estimators".

    Returns {"estimator": <GradientBoostingRegressor>, "params": <grid info>}.
    """
    info("Creating GBM Regressor", ind=4)

    ## Params
    params = mergeParams(GradientBoostingRegressor(), params)
    tuneParams = getGradientBoostingRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    criterion = setParam('criterion', params, grid)
    info("Param: criterion = {0}".format(criterion), ind=6)

    learning_rate = setParam('learning_rate', params, grid)
    info("Param: learning_rate = {0}".format(learning_rate), ind=6)

    loss = setParam('loss', params, grid)
    info("Param: loss = {0}".format(loss), ind=6)

    max_depth = setParam('max_depth', params, grid)
    info("Param: max_depth = {0}".format(max_depth), ind=6)

    max_features = setParam('max_features', params, grid)
    info("Param: max_features = {0}".format(max_features), ind=6)

    min_impurity_decrease = setParam('min_impurity_decrease', params, grid)
    info("Param: min_impurity_decrease = {0}".format(min_impurity_decrease),
         ind=6)

    min_samples_leaf = setParam('min_samples_leaf', params, grid)
    info("Param: min_samples_leaf = {0}".format(min_samples_leaf), ind=6)

    n_estimators = setParam('n_estimators', params, grid)
    info("Param: n_estimators = {0}".format(n_estimators), ind=6)

    ## Estimator
    reg = GradientBoostingRegressor(
        criterion=criterion,
        learning_rate=learning_rate,
        loss=loss,
        max_depth=max_depth,
        max_features=max_features,
        min_impurity_decrease=min_impurity_decrease,
        min_samples_leaf=min_samples_leaf,
        n_estimators=n_estimators)

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 27
0
def createRandomForestRegressor(params=None):
    """Build a RandomForestRegressor from merged parameters.

    Fixes:
      * the `setParam('n_jobs', ...)` call whose result was immediately
        overwritten with -1 was removed (dead call; -1 was always used).
      * log-message typo "n_estimator" corrected to "n_estimators".
      * `params` now defaults to None, consistent with the other factories
        (backward-compatible for positional callers).

    Returns {"estimator": <RandomForestRegressor>, "params": <grid info>}.
    """
    info("Creating Random Forest Regressor", ind=4)

    ## Params
    params = mergeParams(RandomForestRegressor(), params)
    tuneParams = getRandomForestRegressorParams()
    grid = tuneParams['grid']

    info("With Parameters", ind=6)
    bootstrap = setParam('bootstrap', params, grid)
    info("Param: bootstrap = {0}".format(bootstrap), ind=6)

    criterion = setParam('criterion', params, grid)
    info("Param: criterion = {0}".format(criterion), ind=6)

    max_depth = setParam('max_depth', params, grid)
    info("Param: max_depth = {0}".format(max_depth), ind=6)

    max_features = setParam('max_features', params, grid)
    info("Param: max_features = {0}".format(max_features), ind=6)

    min_impurity_decrease = setParam('min_impurity_decrease', params, grid)
    info("Param: min_impurity_decrease = {0}".format(min_impurity_decrease),
         ind=6)

    min_samples_leaf = setParam('min_samples_leaf', params, grid)
    info("Param: min_samples_leaf = {0}".format(min_samples_leaf), ind=6)

    n_estimators = setParam('n_estimators', params, grid)
    info("Param: n_estimators = {0}".format(n_estimators), ind=6)

    # Always use every available core for forest training.
    n_jobs = -1
    info("Param: n_jobs = {0}".format(n_jobs), ind=6)

    ## Estimator
    reg = RandomForestRegressor(bootstrap=bootstrap,
                                criterion=criterion,
                                max_depth=max_depth,
                                max_features=max_features,
                                min_impurity_decrease=min_impurity_decrease,
                                min_samples_leaf=min_samples_leaf,
                                n_estimators=n_estimators,
                                n_jobs=n_jobs)

    return {"estimator": reg, "params": tuneParams}
Ejemplo n.º 28
0
def createMultinomialNaiveBayesClassifier(params):
    """Intentionally disabled factory.

    The original author flagged MultinomialNB as non-functional in this
    framework, so it is reported as unavailable.

    Returns {"estimator": None, "params": None} so callers skip this model.
    """
    info("Creating Multinomial Naive Bayes Classifier", ind=4)
    error("Multinomial Naive Bayes Classifier does not work", ind=4)
    return {"estimator": None, "params": None}
    # Fix: the former parameter-resolution and estimator-construction code
    # that followed this return was unreachable dead code and was removed.
Ejemplo n.º 29
0
def createQDAClassifier(params=None):
    """Build a QuadraticDiscriminantAnalysis classifier.

    Resolves reg_param against the tuning grid and returns
    {"estimator": <QDA>, "params": <tuning grid info>}.
    """
    info("Creating QDA Classifier", ind=4)

    ## Params
    merged = mergeParams(QuadraticDiscriminantAnalysis(), params)
    tuneParams = getQuadraticDiscriminantAnalysisParams()
    search_grid = tuneParams['grid']

    info("With Parameters", ind=6)
    reg_param = setParam('reg_param', merged, search_grid)
    info("Param: reg_param = {0}".format(reg_param), ind=6)

    ## Estimator
    clf = QuadraticDiscriminantAnalysis(reg_param=reg_param)

    return {"estimator": clf, "params": tuneParams}
Ejemplo n.º 30
0
Archivo: svm.py Proyecto: tgadf/pymva
def createSVMLinearRegressor(params=None):
    """Build a linear SVM regressor (sklearn LinearSVR).

    Resolves C and loss against the tuning grid and returns
    {"estimator": <LinearSVR>, "params": <tuning grid info>}.
    """
    info("Creating SVM Linear Regressor", ind=4)

    ## Params
    params = mergeParams(LinearSVR(), params)
    tuneParams = getSVMLinearRegressorParams()
    search_grid = tuneParams['grid']

    info("With Parameters", ind=4)
    cost = setParam('C', params, search_grid, force=False)
    info("Param: C = {0}".format(cost), ind=6)

    loss_fn = setParam('loss', params, search_grid, force=False)
    info("Param: loss = {0}".format(loss_fn), ind=6)

    ## estimator
    estimator = LinearSVR(C=cost, loss=loss_fn)

    return {"estimator": estimator, "params": tuneParams}