예제 #1
0
    def predict(self):
        """
        Fit a multi-layer perceptron classifier and return test-set accuracy.

        Builds an ``MLPClassifier`` (scikit-learn, https://scikit-learn.org)
        from values on the global OHE config, trains it on ``self.X_train`` /
        ``self.y_train``, predicts labels for ``self.X_test``, and stores the
        accuracy against ``self.y_test`` in ``self.acc``.

        Returns
        -------
        float
            The computed accuracy (also kept on ``self.acc``).
        """
        # NOTE(review): config attribute casing is mixed (mlp_solver vs
        # MLP_alpha); preserved exactly because it must match the config class.
        mlp_kwargs = {
            'solver': get_ohe_config().mlp_solver,
            'alpha': get_ohe_config().MLP_alpha,
            'max_iter': 400,
            'hidden_layer_sizes': (get_ohe_config().MLP_layers,
                                   get_ohe_config().MLP_neurons),
            'random_state': get_ohe_config().MLP_random_state,
        }
        classifier = MLPClassifier(**mlp_kwargs)
        classifier.fit(self.X_train, self.y_train)
        predicted = classifier.predict(self.X_test)
        self.acc = OneHotPredictor.get_accuracy(list(predicted), self.y_test)
        return self.acc
예제 #2
0
    def predict(self):
        """
        Fit a random-forest classifier and return test-set accuracy.

        A random forest is an ensemble of decision trees: each tree votes for
        a class and the majority vote becomes the model's prediction. Low
        correlation between the individual trees is what makes the committee
        outperform any single tree, provided the features carry real signal.

        Uses ``RandomForestClassifier`` (scikit-learn,
        https://scikit-learn.org) configured from the global OHE config,
        trains it on ``self.X_train`` / ``self.y_train``, predicts labels for
        ``self.X_test``, and stores the accuracy against ``self.y_test``.

        Returns
        -------
        float
            The computed accuracy (also kept on ``self.acc``).
        """
        forest = RandomForestClassifier(
            n_estimators=get_ohe_config().rf_estimators,
            max_depth=get_ohe_config().rf_max_depth)
        forest.fit(self.X_train, self.y_train)
        predicted = forest.predict(self.X_test)
        self.acc = OneHotPredictor.get_accuracy(list(predicted), self.y_test)
        return self.acc
예제 #3
0
 def predict(self):
     """
     Fit an ``MLPClassifier`` from config values and return test-set accuracy.

     Trains on ``self.X_train`` / ``self.y_train``, predicts labels for
     ``self.X_test``, and stores the accuracy against ``self.y_test`` in
     ``self.acc``.
     """
     classifier = MLPClassifier(
         solver=get_ohe_config().mlp_solver,
         alpha=get_ohe_config().mlp_alpha,
         hidden_layer_sizes=(get_ohe_config().mlp_layers,
                             get_ohe_config().mlp_neurons),
         random_state=get_ohe_config().mlp_random_state)
     classifier.fit(self.X_train, self.y_train)
     predicted = classifier.predict(self.X_test)
     self.acc = OneHotPredictor.get_accuracy(list(predicted), self.y_test)
     return self.acc
예제 #4
0
    def predict(self):
        """
        Fit a random-forest regressor and return its score on the test set.

        Uses ``RandomForestRegressor`` (scikit-learn,
        https://scikit-learn.org) configured from the global OHE config,
        trains it on ``self.X_train`` / ``self.y_train``, predicts values for
        ``self.X_test``, and stores the result of
        ``OneHotPredictor.get_accuracy`` against ``self.y_test``.

        Returns
        -------
        float
            The computed score (also kept on ``self.acc``).
        """
        # NOTE(review): a regressor emits continuous values, so an exact-match
        # accuracy metric looks suspect here — confirm get_accuracy's intent.
        regressor = RandomForestRegressor(
            n_estimators=get_ohe_config().r_n_estimators,
            max_depth=get_ohe_config().r_max_depth)
        regressor.fit(self.X_train, self.y_train)
        predicted = regressor.predict(self.X_test)
        self.acc = OneHotPredictor.get_accuracy(list(predicted), self.y_test)
        return self.acc
예제 #5
0
파일: svm.py 프로젝트: tmlrnc/axn_ml_2
    def predict(self):
        """
        Fit a linear SVM classifier and return test-set accuracy.

        Uses ``LinearSVC`` (scikit-learn, https://scikit-learn.org)
        configured from the global OHE config, trains it on ``self.X_train``
        / ``self.y_train``, predicts labels for ``self.X_test``, and stores
        the accuracy against ``self.y_test`` in ``self.acc``.

        Returns
        -------
        float
            The computed accuracy (also kept on ``self.acc``).
        """
        # Bug fix: the original passed max_iter=-1. That is the SVC/SVR
        # convention for "no iteration limit", but LinearSVC (liblinear)
        # requires a positive iteration cap and scikit-learn rejects -1.
        # Use the library's documented default of 1000.
        algorithm = LinearSVC(random_state=get_ohe_config().lr_random_state,
                              max_iter=1000,
                              multi_class=get_ohe_config().s_multi_class)
        algorithm.fit(self.X_train, self.y_train)
        y_pred = list(algorithm.predict(self.X_test))
        self.acc = OneHotPredictor.get_accuracy(y_pred, self.y_test)
        return self.acc
    def predict(self):
        """
        Fit a logistic-regression classifier and return test-set accuracy.

        Uses ``LogisticRegression`` (scikit-learn, https://scikit-learn.org)
        with the newton-cg solver and the config-supplied random state,
        trains it on ``self.X_train`` / ``self.y_train``, predicts labels for
        ``self.X_test``, and stores the accuracy against ``self.y_test``.

        Returns
        -------
        float
            The computed accuracy (also kept on ``self.acc``).
        """
        seed = get_ohe_config().lr_random_state
        model = LogisticRegression(solver='newton-cg', random_state=seed)
        model.fit(self.X_train, self.y_train)
        predicted = model.predict(self.X_test)
        self.acc = OneHotPredictor.get_accuracy(list(predicted), self.y_test)
        return self.acc
예제 #7
0
    def predict(self):
        """
        Fit a k-nearest-neighbors classifier and return test-set accuracy.

        Uses ``KNeighborsClassifier`` (scikit-learn,
        https://scikit-learn.org) with the Minkowski distance metric and the
        config-supplied neighbor count, trains it on ``self.X_train`` /
        ``self.y_train``, predicts labels for ``self.X_test``, and stores the
        accuracy against ``self.y_test`` in ``self.acc``.

        Returns
        -------
        float
            The computed accuracy (also kept on ``self.acc``).
        """
        neighbors = get_ohe_config().kneighbors_classifiernn
        knn = KNeighborsClassifier(n_neighbors=neighbors, metric='minkowski')
        knn.fit(self.X_train, self.y_train)
        predicted = knn.predict(self.X_test)
        self.acc = OneHotPredictor.get_accuracy(list(predicted), self.y_test)
        return self.acc
예제 #8
0
    def predict(self):
        """
        Fit a KMeans clusterer and score its test-set assignments.

        Uses ``KMeans`` (scikit-learn, https://scikit-learn.org) with the
        config-supplied cluster count and a fixed random state, fits it on
        the training data, assigns clusters to ``self.X_test``, and stores
        the result of ``OneHotPredictor.get_accuracy`` against
        ``self.y_test`` in ``self.acc``.

        Returns
        -------
        float
            The computed score (also kept on ``self.acc``).
        """
        # NOTE(review): KMeans.fit ignores the y argument, and raw cluster
        # ids need not align with the class labels in y_test — confirm that
        # get_accuracy accounts for this.
        clusterer = KMeans(
            n_clusters=get_ohe_config().kmeans_bins,
            random_state=0)
        clusterer.fit(self.X_train, self.y_train)
        assigned = clusterer.predict(self.X_test)
        self.acc = OneHotPredictor.get_accuracy(list(assigned), self.y_test)
        return self.acc
예제 #9
0
def algorithm():
    """Build a ``DecisionTreeClassifier`` seeded from the global OHE config."""
    seed = get_ohe_config().dtc_random_state
    return DecisionTreeClassifier(random_state=seed)