Example #1
0
    def __new__(self, y, clf='lda', kern='rbf', n_knn=10, n_tree=100,
                priors=False, **kwargs):
        """Build and return a configured scikit-learn classifier object.

        Parameters
        ----------
        y : array_like
            Label vector, used only to infer the number of classes when
            default uniform priors are computed.
        clf : str | int | object
            Either a predefined classifier name/code ('lda'/0, 'svm'/1,
            'linearsvm'/2, 'nusvm'/3, 'nb'/4, 'knn'/5, 'rf'/6, 'lr'/7,
            'qda'/8) or an already-built classifier instance.
        kern : str
            SVM kernel: 'linear', 'poly', 'rbf', 'sigmoid', 'precomputed'
            or a callable.
        n_knn : int
            Number of neighbors for the KNN classifier.
        n_tree : int
            Number of trees for the random forest.
        priors : array_like | bool
            Class priors (forwarded to LDA). When False/None, uniform
            priors 1/n_classes are computed from `y`.
        **kwargs
            Extra keyword arguments forwarded to the classifier constructor.

        Returns
        -------
        object
            The classifier, decorated with `lgStr` (long description) and
            `shStr` (short description) attributes.

        Raises
        ------
        ValueError
            If `clf` is an unrecognized string/int code.
        """
        # Use a pre-defined classifier :
        if isinstance(clf, (str, int)):
            # Fix: compute uniform priors only when the caller did not pass
            # any (previously a user-supplied `priors` was always clobbered).
            if priors is None or priors is False:
                n_class = len(np.unique(y))
                priors = np.array([1 / n_class] * n_class)

            if isinstance(clf, str):
                clf = clf.lower()

            # LDA :
            if clf == 'lda' or clf == 0:
                clfObj = LinearDiscriminantAnalysis(
                    priors=priors, **kwargs)
                clfObj.lgStr = 'Linear Discriminant Analysis'
                clfObj.shStr = 'LDA'

            # SVM : 'linear', 'poly', 'rbf', 'sigmoid', 'precomputed' or a callable
            elif clf == 'svm' or clf == 1:
                clfObj = SVC(kernel=kern, probability=True, **kwargs)
                clfObj.lgStr = 'Support Vector Machine (kernel=' + kern + ')'
                clfObj.shStr = 'SVM-' + kern

            # Linear SVM:
            elif clf == 'linearsvm' or clf == 2:
                clfObj = LinearSVC(**kwargs)
                clfObj.lgStr = 'Linear Support Vector Machine'
                clfObj.shStr = 'LSVM'

            # Nu SVM :
            elif clf == 'nusvm' or clf == 3:
                clfObj = NuSVC(**kwargs)
                clfObj.lgStr = 'Nu Support Vector Machine'
                clfObj.shStr = 'NuSVM'

            # Naive Bayesian :
            elif clf == 'nb' or clf == 4:
                clfObj = GaussianNB(**kwargs)
                # Fix: corrected 'Baysian' -> 'Bayesian' typo in the label.
                clfObj.lgStr = 'Naive Bayesian'
                clfObj.shStr = 'NB'

            # KNN :
            elif clf == 'knn' or clf == 5:
                clfObj = KNeighborsClassifier(n_neighbors=n_knn, **kwargs)
                clfObj.lgStr = 'k-Nearest Neighbor (neighbor=' + str(n_knn) + ')'
                clfObj.shStr = 'KNN-' + str(n_knn)

            # Random forest :
            elif clf == 'rf' or clf == 6:
                clfObj = RandomForestClassifier(n_estimators=n_tree, **kwargs)
                clfObj.lgStr = 'Random Forest (tree=' + str(n_tree) + ')'
                clfObj.shStr = 'RF-' + str(n_tree)

            # Logistic regression :
            elif clf == 'lr' or clf == 7:
                clfObj = LogisticRegression(**kwargs)
                clfObj.lgStr = 'Logistic Regression'
                clfObj.shStr = 'LogReg'

            # QDA :
            elif clf == 'qda' or clf == 8:
                clfObj = QuadraticDiscriminantAnalysis(**kwargs)
                clfObj.lgStr = 'Quadratic Discriminant Analysis'
                clfObj.shStr = 'QDA'

            else:
                # Fix: removed the stray doubled quote in the message.
                raise ValueError('No classifier "' + str(clf) + '" found')

        # Use a custom classifier :
        else:
            clfObj = clf
            clfObj.shStr = 'custom'
            clfObj.lgStr = 'Custom classifier'

        return clfObj
Example #2
0
    def __new__(self,
                y,
                clf='lda',
                kern='rbf',
                n_knn=10,
                n_tree=100,
                priors=False,
                **kwargs):
        """Build and return a configured scikit-learn classifier object.

        Parameters
        ----------
        y : array_like
            Label vector, used only to infer the number of classes when
            default uniform priors are computed.
        clf : str | int | object
            Either a predefined classifier name/code ('lda'/0, 'svm'/1,
            'linearsvm'/2, 'nusvm'/3, 'nb'/4, 'knn'/5, 'rf'/6, 'lr'/7,
            'qda'/8) or an already-built classifier instance.
        kern : str
            SVM kernel: 'linear', 'poly', 'rbf', 'sigmoid', 'precomputed'
            or a callable.
        n_knn : int
            Number of neighbors for the KNN classifier.
        n_tree : int
            Number of trees for the random forest.
        priors : array_like | bool
            Class priors (forwarded to LDA). When False/None, uniform
            priors 1/n_classes are computed from `y`.
        **kwargs
            Extra keyword arguments forwarded to the classifier constructor.

        Returns
        -------
        object
            The classifier, decorated with `lgStr` (long description) and
            `shStr` (short description) attributes.

        Raises
        ------
        ValueError
            If `clf` is an unrecognized string/int code.
        """
        # Use a pre-defined classifier :
        if isinstance(clf, (str, int)):
            # Fix: compute uniform priors only when the caller did not pass
            # any (previously a user-supplied `priors` was always clobbered).
            if priors is None or priors is False:
                n_class = len(np.unique(y))
                priors = np.array([1 / n_class] * n_class)

            if isinstance(clf, str):
                clf = clf.lower()

            # LDA :
            if clf == 'lda' or clf == 0:
                clfObj = LinearDiscriminantAnalysis(priors=priors, **kwargs)
                clfObj.lgStr = 'Linear Discriminant Analysis'
                clfObj.shStr = 'LDA'

            # SVM : 'linear', 'poly', 'rbf', 'sigmoid', 'precomputed' or a callable
            elif clf == 'svm' or clf == 1:
                clfObj = SVC(kernel=kern, probability=True, **kwargs)
                clfObj.lgStr = 'Support Vector Machine (kernel=' + kern + ')'
                clfObj.shStr = 'SVM-' + kern

            # Linear SVM:
            elif clf == 'linearsvm' or clf == 2:
                clfObj = LinearSVC(**kwargs)
                clfObj.lgStr = 'Linear Support Vector Machine'
                clfObj.shStr = 'LSVM'

            # Nu SVM :
            elif clf == 'nusvm' or clf == 3:
                clfObj = NuSVC(**kwargs)
                clfObj.lgStr = 'Nu Support Vector Machine'
                clfObj.shStr = 'NuSVM'

            # Naive Bayesian :
            elif clf == 'nb' or clf == 4:
                clfObj = GaussianNB(**kwargs)
                # Fix: corrected 'Baysian' -> 'Bayesian' typo in the label.
                clfObj.lgStr = 'Naive Bayesian'
                clfObj.shStr = 'NB'

            # KNN :
            elif clf == 'knn' or clf == 5:
                clfObj = KNeighborsClassifier(n_neighbors=n_knn, **kwargs)
                clfObj.lgStr = 'k-Nearest Neighbor (neighbor=' + str(
                    n_knn) + ')'
                clfObj.shStr = 'KNN-' + str(n_knn)

            # Random forest :
            elif clf == 'rf' or clf == 6:
                clfObj = RandomForestClassifier(n_estimators=n_tree, **kwargs)
                clfObj.lgStr = 'Random Forest (tree=' + str(n_tree) + ')'
                clfObj.shStr = 'RF-' + str(n_tree)

            # Logistic regression :
            elif clf == 'lr' or clf == 7:
                clfObj = LogisticRegression(**kwargs)
                clfObj.lgStr = 'Logistic Regression'
                clfObj.shStr = 'LogReg'

            # QDA :
            elif clf == 'qda' or clf == 8:
                clfObj = QuadraticDiscriminantAnalysis(**kwargs)
                clfObj.lgStr = 'Quadratic Discriminant Analysis'
                clfObj.shStr = 'QDA'

            else:
                # Fix: removed the stray doubled quote in the message.
                raise ValueError('No classifier "' + str(clf) + '" found')

        # Use a custom classifier :
        else:
            clfObj = clf
            clfObj.shStr = 'custom'
            clfObj.lgStr = 'Custom classifier'

        return clfObj