Example #1
def tuneSGDParametersIndividually(decentNValues):
    # This should be a list of all your parameters with a wide range of possible values
    parameterGrid = {
        "loss": [
            'squared_loss', 'huber', 'epsilon_insensitive',
            'squared_epsilon_insensitive'
        ],
        "penalty": ['none', 'l2', 'l1', 'elasticnet'],
        "alpha":
        np.arange(0.00001, 100, 10),
        "l1_ratio":
        np.arange(0, 1, 0.1),
        "fit_intercept": [True, False],
        "shuffle": [True, False],
        "epsilon":
        np.arange(0.00001, 100, 10),
        "learning_rate": ['constant', 'optimal', 'invscaling']
    }

    threeBestParams = machineLearning.tuneParametersIndividually(
        parameterGrid,
        regressorName,
        regressor,
        jsonFileNames,
        decentNValues,
        2,
        dataSource='reddit')
    return threeBestParams
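
All of the examples on this page delegate to machineLearning.tuneParametersIndividually, whose body is not shown in this listing. Below is a minimal sketch of what such a helper might look like, assuming it wraps scikit-learn's GridSearchCV and searches one parameter at a time while the remaining parameters keep their current settings; the names tuneOneParameterAtATime, estimator, X and y are illustrative, not taken from the project.

from sklearn.model_selection import GridSearchCV

def tuneOneParameterAtATime(parameterGrid, estimator, X, y, cv=2):
    # Search each parameter on its own so every grid stays small even when
    # the candidate ranges are wide; the other parameters are left untouched.
    bestValues = {}
    for name, values in parameterGrid.items():
        search = GridSearchCV(estimator, {name: list(values)}, cv=cv)
        search.fit(X, y)
        bestValues[name] = search.best_params_[name]
    return bestValues
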
Example #2
def tuneKNeighborsParametersIndividually(decentNValues):
    # This should be a list of all your parameters with a wide range of possible values
    parameterGrid = {
        "n_neighbors": np.arange(5, 100, 5),
        "leaf_size": np.arange(1, 100, 5),
        "weights": ['uniform', 'distance'],
        "algorithm": ['ball_tree', 'kd_tree', 'brute'],
    }

    threeBestParams = machineLearning.tuneParametersIndividually(
        parameterGrid, classifierName, classifier, jsonFileNames, decentNValues,
        2)
    return threeBestParams
Example #3
def tuneBaggingParametersIndividually(decentNValues):
    parameterGrid = {
        'n_estimators': np.arange(10, 500, 40),
        'max_samples': [100],
        'max_features': [5, 10],
        'bootstrap': [True],
        'bootstrap_features': [True],
        'warm_start': [True],
    }

    threeBestParams = machineLearning.tuneParametersIndividually(parameterGrid, classifierName, classifier,
                                                                 jsonFileNames,
                                                                 decentNValues, 2)
    return threeBestParams
Example #4
def tuneRandomForestParametersIndividually(decentNValues):
    # This should be a list of all your parameters with a wide range of possible values
    parameterGrid = {
        "n_estimators": np.arange(10, 500, 40),
        "max_depth": np.arange(1, 14, 1),
        "min_samples_split": np.arange(2, 203, 10),
        "min_samples_leaf": np.arange(1, 81, 4),
        "max_leaf_nodes": np.arange(2, 20, 1),
        "min_weight_fraction_leaf": np.arange(0.1, 0.4, 0.1),
        "max_features": ["auto", "sqrt", "log2"]
    }

    threeBestParams = machineLearning.tuneParametersIndividually(parameterGrid, classifierName, classifier,
                                                                 jsonFileNames,
                                                                 decentNValues, 2, dataSource='reddit')
    return threeBestParams
Example #5
def tuneBaggingRegressorParametersIndividually(decentNValues):
    # This should be a list of all your parameters with a wide range of possible values
    parameterGrid = {
        'n_estimators': np.arange(10, 500, 40),
        'max_samples': [100, 500, 1000],
        'max_features': [10, 5],
        'bootstrap': [True],
        'bootstrap_features': [True],
        'warm_start': [True],
    }

    threeBestParams = machineLearning.tuneParametersIndividually(
        parameterGrid,
        classifierName,
        classifier,
        jsonFileNames,
        decentNValues,
        3,
        dataSource='reddit')
    return threeBestParams
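
Each tuner above returns threeBestParams, presumably the top-scoring values found for every parameter. Assuming that return value is a mapping from parameter name to a short list of best values (an assumption, since the helper's return type is not shown), it can feed the exhaustive combination step that the next example sets up; the function below is an illustrative sketch, with searchBestCombination, estimator, X and y standing in for the project's own names and data.

from sklearn.model_selection import GridSearchCV

def searchBestCombination(threeBestParams, estimator, X, y, cv=3):
    # threeBestParams is assumed to map each parameter to its short list of
    # best values, e.g. {"n_estimators": [50, 90, 130], "max_samples": [100]}.
    combinationGrid = {name: list(values) for name, values in threeBestParams.items()}
    # Every combination of the short-listed values is fitted and scored.
    search = GridSearchCV(estimator, combinationGrid, cv=cv)
    search.fit(X, y)
    return search.best_params_
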
Example #6
# ______________________________________________________
# WHAT ARE THE BEST VALUES OF EACH PARAMETER?
# ______________________________________________________
#

# This should be a list of all your parameters with a wide range of possible values
parameterGrid = {
    "C": [0.001, 0.01, 0.1, 1, 10, 100, 1000],
    "fit_intercept": [True, False],
    "solver": ['newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga'],
    "verbose": np.arange(0, 100, 4),
    "warm_start": [True, False],
    "random_state": np.arange(0, 100, 4)
}

machineLearning.tuneParametersIndividually(parameterGrid, classifierName,
                                           classifier, jsonFileNames,
                                           decentNValue)

# ______________________________________________________
# WHAT IS THE BEST COMBINATION OF VARIABLES?
# ______________________________________________________
#

# Fill this grid with only the best parameter values, as every single combination will be run
# (the values listed below already multiply out to 3 x 2 x 4 x 4 = 96 candidate combinations)
parameterGrid = {
    "C": [1000, 0.01, 10],
    "fit_intercept": [False],
    "solver": ['lbfgs', 'liblinear'],
    "verbose": [56, 68, 84, 24],
    "warm_start": [False],
    "random_state": [92, 12, 20, 56]