Example #1
def svr():
    """Epsilon-Support Vector Regression, using the RBF kernel

    :return: a ScikitLearnClassifier wrapping an SVR object
    """
    from sklearn.svm import SVR
    clf = SVR()
    return ScikitLearnClassifier(clf)
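These examples all return their estimator wrapped in a ScikitLearnClassifier, which is not defined in this listing. A minimal sketch of what such a wrapper might look like, assuming it simply delegates fit and predict to the wrapped estimator (the interface below is a guess, not the original class):

class ScikitLearnClassifier:
    """Hypothetical reconstruction of the wrapper used in these examples."""

    def __init__(self, learner):
        # The wrapped scikit-learn estimator (classifier or regressor).
        self.learner = learner

    def fit(self, X, y):
        # Delegate training to the underlying estimator.
        self.learner.fit(X, y)
        return self

    def predict(self, X):
        # Delegate prediction to the underlying estimator.
        return self.learner.predict(X)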
Example #2
def elastic_net_regression():
    """L1+L2-regularized least squares linear classifier trained using Coordinate Descent

    :return: an ElasticNet object
    """

    from sklearn.linear_model import ElasticNet
    clf = ElasticNet()
    return ScikitLearnClassifier(clf)
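The L1/L2 mix is controlled by ElasticNet's l1_ratio parameter (1.0 is pure lasso, 0.0 pure ridge); the constructor above leaves it at its default of 0.5. A minimal usage sketch on synthetic data:

from sklearn.linear_model import ElasticNet
from sklearn.datasets import make_regression

# Synthetic regression problem with noisy targets.
X, y = make_regression(n_samples=100, n_features=10, noise=0.1, random_state=0)

# l1_ratio blends the penalties: alpha * l1_ratio * ||w||_1
# + 0.5 * alpha * (1 - l1_ratio) * ||w||_2^2.
clf = ElasticNet(alpha=0.5, l1_ratio=0.7)
clf.fit(X, y)
print(clf.coef_)  # the L1 component drives some coefficients toward zero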
Example #3
def ridge_regression():
    """ L2-regularized least squares linear classifier

    :return: a Ridge object
    """

    from sklearn.linear_model import Ridge
    clf = Ridge()
    return ScikitLearnClassifier(clf)
Example #4
def sgd_regressor():
    """Linear classifier fitted by minimizing a regularized empirical loss with Stochastic Gradient Descent

    :return: a SGDRegressor object
    """

    from sklearn.linear_model import SGDRegressor
    clf = SGDRegressor()
    return ScikitLearnClassifier(clf)
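SGD-based estimators are sensitive to feature scales, so it is usually worth standardizing inputs first. A sketch that pairs the regressor with a scaler in a pipeline:

from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import SGDRegressor
from sklearn.datasets import make_regression

X, y = make_regression(n_samples=200, n_features=5, noise=0.5, random_state=0)

# Standardizing matters for SGD: the step size interacts badly with
# features on very different scales.
model = make_pipeline(StandardScaler(), SGDRegressor(max_iter=1000, tol=1e-3))
model.fit(X, y)
print(model.predict(X[:3]))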
Example #5
def ard_regression(num_iter=300):
    """Bayesian Automated Relevance Determination regression.

    :param num_iter: maximum number of iterations, default 300
    :return: a ScikitLearnClassifier wrapping an ARDRegression object
    """

    from sklearn.linear_model import ARDRegression
    clf = ARDRegression(n_iter=num_iter)
    return ScikitLearnClassifier(clf)
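n_iter is the historical name of this argument; newer scikit-learn releases renamed it to max_iter (the old name was deprecated around 1.3). A hedged compatibility sketch:

from sklearn.linear_model import ARDRegression

def make_ard(num_iter=300):
    # Newer scikit-learn uses max_iter; fall back to n_iter on old versions.
    try:
        return ARDRegression(max_iter=num_iter)
    except TypeError:
        return ARDRegression(n_iter=num_iter)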
Example #6
def naive_bayes():
    """Naive Bayes algorithm for classification

    :return: a ScikitLearnClassifier object wrapping a GaussianNB learner
    """
    from sklearn.naive_bayes import GaussianNB
    gaussian_NB_learner = GaussianNB()

    return ScikitLearnClassifier(gaussian_NB_learner)
Example #7
def logistic_regression(penalty="l1", c=1.0):
    """Logistic regression classifier.

    :param penalty: the norm used in the penalization (string): 'l1' or 'l2'.
    :param c: Inverse of regularization strength; must be a positive float. As in support vector machines, smaller values specify stronger regularization.
    :return: a ScikitLearnClassifier wrapping a LogisticRegression object
    """
    from sklearn.linear_model import LogisticRegression
    clf = LogisticRegression(penalty=str(penalty), C=float(c))
    return ScikitLearnClassifier(clf)
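In current scikit-learn the default solver is lbfgs, which only supports the l2 penalty; a default of penalty="l1" as above requires explicitly selecting liblinear or saga. A sketch under that assumption:

from sklearn.linear_model import LogisticRegression
from sklearn.datasets import load_iris

X, y = load_iris(return_X_y=True)

# l1 regularization needs a solver that supports it; the lbfgs default
# raises an error for penalty="l1".
clf = LogisticRegression(penalty="l1", C=1.0, solver="liblinear")
clf.fit(X, y)
print(clf.coef_)  # L1 drives some coefficients exactly to zero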
Example #8
def lasso_LARS(alpha=1.0):
    """L1-regularized least squares linear classifier trained with Least Angle Regression. alpha=constant that multiplies the penalty term, default 1.0

    :param alpha: Constant that multiplies the penalty term.
    :return: a LassoLars object
    """

    from sklearn.linear_model import LassoLars
    clf = LassoLars(alpha=alpha)

    return ScikitLearnClassifier(clf)
Example #9
def J48(max_features="auto", depth=None):
    """ Creates a J48 decision tree classifier

    :param max_features: The number of features to consider when looking for the best split
    :param depth: The maximum depth of the tree
    :return: a DecisionTreeClassifier object
    """

    from sklearn import tree
    clf = tree.DecisionTreeClassifier(max_features=max_features,
                                      max_depth=depth)

    return ScikitLearnClassifier(clf)
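max_features="auto" was deprecated for decision trees and removed in recent scikit-learn releases; a sketch that passes an explicit value instead ("sqrt" is what "auto" used to mean for classifiers):

from sklearn import tree
from sklearn.datasets import load_iris

X, y = load_iris(return_X_y=True)

# "auto" is gone in newer scikit-learn; "sqrt" reproduces the old
# classifier behaviour of considering sqrt(n_features) per split.
clf = tree.DecisionTreeClassifier(max_features="sqrt", max_depth=3)
clf.fit(X, y)
print(tree.export_text(clf))  # human-readable view of the learned splits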
Example #10
def SVC(penalty=1.0, kernel="rbf", degree=3):
    """Support Vector Machines with kernels based on libsvm

    :param penalty: Penalty parameter C of the error term (float)
    :param kernel: Specifies the kernel type to be used in the algorithm (string)
    :param degree: Degree of the polynomial ('poly') kernel function (int)
    :return: a ScikitLearnClassifier wrapping an SVC object
    """

    from sklearn.svm import SVC
    clf = SVC(C=float(penalty), kernel=str(kernel), degree=int(degree))
    return ScikitLearnClassifier(clf)
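The degree argument only matters for the polynomial kernel; a short usage sketch contrasting the two kernels on toy data:

from sklearn.svm import SVC
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=200, n_features=4, random_state=0)

# degree is ignored by the RBF kernel; it only affects kernel="poly".
rbf_clf = SVC(C=1.0, kernel="rbf")
poly_clf = SVC(C=1.0, kernel="poly", degree=3)

for clf in (rbf_clf, poly_clf):
    clf.fit(X, y)
    print(clf.kernel, clf.score(X, y))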
Example #11
def regression_tree(max_features="auto", max_depth=None):
    """Decision tree for regression problems

    :param max_features: The number of features to consider when looking for the best split: if int, consider max_features features at each split; if float, max_features is a fraction and int(max_features * n_features) features are considered at each split
    :param max_depth: The maximum depth of the tree
    :return: a ScikitLearnClassifier wrapping a DecisionTreeRegressor object
    """

    from sklearn import tree

    clf = tree.DecisionTreeRegressor(max_features=max_features,
                                     max_depth=max_depth)

    return ScikitLearnClassifier(clf)
Example #12
def k_nearest_neighbors(num_neighbors=5, weights='uniform', alg='auto'):
    """k-Nearest Neighbors classifier based on the ball tree data structure for low-dimensional data and brute-force search for high-dimensional data

    :param num_neighbors: Number of neighbors to use by default for k_neighbors queries.
    :param weights: Weight function used in prediction.
    :param alg: Algorithm used to compute the nearest neighbors.
    :return: a ScikitLearnClassifier wrapping a KNeighborsClassifier object.
    """

    from sklearn.neighbors import KNeighborsClassifier
    knn = KNeighborsClassifier(n_neighbors=int(num_neighbors),
                               weights=str(weights),
                               algorithm=str(alg))
    return ScikitLearnClassifier(knn)
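A brief usage sketch; weights='distance' makes closer neighbors count more than distant ones, which can help when classes overlap:

from sklearn.neighbors import KNeighborsClassifier
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# "distance" weighting votes each neighbor in inverse proportion
# to its distance from the query point.
knn = KNeighborsClassifier(n_neighbors=5, weights="distance", algorithm="auto")
knn.fit(X_train, y_train)
print(knn.score(X_test, y_test))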
Example #13
def linear_SVC(c=1.0, loss="l2", penalty="l2", multi_class="ovr"):
    """Support Vector Regression, implemented in terms of liblinear

    :param c: Penalty parameter C of the error term. float, default=1.0
    :param loss: Specifies the loss function. string, ‘l1’ or ‘l2’, default=’l2’
    :param penalty: Specifies the norm used in the penalization. string, ‘l1’ or ‘l2’, default 'l2'
    :param multi_class:
    :return: a LinearSVC object.
    """
    from sklearn.svm import LinearSVC
    clf = LinearSVC(C=float(c),
                    loss=str(loss),
                    penalty=str(penalty),
                    multi_class=str(multi_class),
                    dual=True)

    return ScikitLearnClassifier(clf)
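In current scikit-learn the loss values are named 'hinge' and 'squared_hinge' rather than 'l1' and 'l2' (the old aliases were removed), and not every loss/penalty/dual combination is supported. A sketch using the modern names:

from sklearn.svm import LinearSVC
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=200, n_features=10, random_state=0)

# Modern equivalent of loss="l2": the squared hinge loss. Note that
# penalty="l1" is only supported with loss="squared_hinge" and dual=False.
clf = LinearSVC(C=1.0, loss="squared_hinge", penalty="l2",
                multi_class="ovr", dual=True)
clf.fit(X, y)
print(clf.score(X, y))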