def SVC(*args, **kwargs):
    """Version-tolerant factory for :class:`sklearn.svm.SVC`.

    Old scikit-learn releases accept a ``scale_C`` constructor argument;
    newer ones removed it and behave as if it were always True.  When the
    installed version no longer takes ``scale_C`` (signalled by the
    module-level flag ``svcTakesScaleC``), the argument is silently
    discarded before the classifier is built.

    All positional and keyword arguments are forwarded to ``sklearn.svm.SVC``.
    """
    from sklearn.svm import SVC as _SVC

    # Drop the legacy argument when the installed sklearn would reject it.
    if not svcTakesScaleC:
        kwargs.pop("scale_C", None)
    return _SVC(*args, **kwargs)
def SVC(*args, **kwargs):
    """Version-tolerant factory for :class:`sklearn.svm.SVC`.

    Old scikit-learn versions take ``scale_C`` as a parameter; new ones
    don't and default to True.  When the installed version does not accept
    ``scale_C`` (``svcTakesScaleC`` is False), the argument is removed
    before constructing the classifier.

    All positional and keyword arguments are forwarded to ``sklearn.svm.SVC``.
    """
    from sklearn.svm import SVC as _SVC

    # old scikit-learn versions take scale_C as a parameter;
    # new ones don't and default to True
    if not svcTakesScaleC and "scale_C" in kwargs:
        del kwargs["scale_C"]
    # NOTE(review): removed a leftover ``print(kwargs)`` debug statement —
    # it spammed stdout on every classifier construction.
    return _SVC(*args, **kwargs)
def __init__(self, **kwargs):
    """Initialize the SVM classifier.

    All keyword arguments that are not listed will be forwarded to the
    underlying classifier, in this case ``sklearn.SVC``.  For instance,
    if you pass an argument ``probability=True``, this will be forwarded
    to the initialization of SVC.

    Keyword arguments
    -----------------
    max_iter: int, default = 1e6
        number of iterations during hyper-parameter tuning
    k_cross_val: int, default = 5
        number of cross-validations (k-fold)
    cross_validation: Boolean, default = True
        Enable k-fold cross validation for hyper-parameter tuning.
        If False, the SVM will use ``probability=True`` if not
        specified otherwise in kwargs.
    """
    super(SVM, self).__init__()

    # initialize some default values for the SVM backend
    self.max_iter = kwargs.pop('max_iter', 1e6)

    # parameter grid searched during k-fold cross validation /
    # hyper-parameter tuning
    self.params = [{
        'kernel': ['rbf', 'sigmoid', 'poly'],
        'C': [1e1, 1e2, 1e3, 1e4],
        'gamma': [1e4, 1e3, 1e2, 1, 1e-1, 1e-2],
        'degree': [2, 3, 4]
    }]
    self.k_cross_val = kwargs.pop('k_cross_val', 5)

    # initialize the classifier using grid search to find optimal
    # parameters via cross validation
    if kwargs.pop('cross_validation', True):
        self.clf = GridSearchCV(
            _SVC(max_iter=self.max_iter, **kwargs),
            self.params,
            cv=self.k_cross_val)
    else:
        # Without cross validation, default to probability estimates
        # unless the caller explicitly disabled them.
        probability = kwargs.pop('probability', True)
        self.clf = _SVC(max_iter=self.max_iter,
                        probability=probability,
                        **kwargs)
class RecipeExtractionModel(_Enum):
    """Enumeration of pre-configured classifier back-ends.

    Each member's value is a ready-to-use scikit-learn estimator instance
    with fixed hyper-parameters.
    """

    KNeighbors = _KNeighborsClassifier(n_neighbors=10, weights='distance')
    ExtraTrees = _ExtraTreesClassifier(n_estimators=10,
                                       max_features=None,
                                       min_samples_leaf=75)
    SVC = _SVC(C=1, probability=True, gamma='auto')
    MLP = _MLPClassifier(max_iter=1000)

    def __str__(self):
        # Render the wrapped estimator's configuration, not the member name.
        return _pprint.pformat(self.value)