def __init__(self, C, kernel=None, eps=1e-4, max_iter=consts.MAX_ITER,
             min_iter=1, info=None):
    """Set up the sequential minimal optimisation (SMO) solver.

    Parameters
    ----------
    C : float
        Must be non-negative. The penalty (box-constraint) parameter.
        Negative values are clamped to 0.
    kernel : kernel object, optional
        The kernel, of type parsimony.algorithms.utils.Kernel. Default
        is a linear kernel.
    eps : float
        Must be positive. The convergence tolerance. Values below the
        machine epsilon are clamped to consts.FLOAT_EPSILON.
    max_iter : int
        The maximum number of iterations. Clamped to be at least
        min_iter.
    min_iter : int
        Must be at least 1. The minimum number of iterations.
    info : list or tuple of utils.Info, optional
        What convergence/run information to return. Default is an empty
        list.
    """
    # NOTE: kernel and info use None sentinels instead of default
    # values of LinearKernel() and []. Defaults are evaluated once at
    # function definition time, so the originals shared a single kernel
    # object and a single info list across every default-constructed
    # instance (the classic mutable-default-argument bug).
    if kernel is None:
        kernel = LinearKernel()
    if info is None:
        info = []

    super(SequentialMinimalOptimization, self).__init__(kernel=kernel,
                                                        info=info)

    self.C = max(0.0, float(C))  # 0.0 for consistency with sibling ctors
    self.eps = max(consts.FLOAT_EPSILON, float(eps))
    self.min_iter = max(1, int(min_iter))
    self.max_iter = max(self.min_iter, int(max_iter))
def __init__(self, X, y, l, kernel=None, penalty_start=0, mean=False):
    """Construct the (possibly kernelised) ridge loss function.

    Parameters
    ----------
    X : numpy array (n, p)
        The data matrix.
    y : numpy array (n, 1)
        The output vector. Must only contain values of -1 and 1.
    l : float
        Must be non-negative. The ridge parameter. Negative values are
        clamped to 0.
    kernel : kernel object, optional
        The kernel for non-linear SVM, of type
        parsimony.algorithms.utils.Kernel. Default is a linear kernel
        built over X with caching enabled.
    penalty_start : int
        Must be non-negative. The number of columns, variables etc., to
        except from penalisation. Equivalently, the first index to be
        penalised. Default is 0, all columns are included.
    mean : bool
        Whether to compute the squared loss or the mean squared loss.
        Default is False, the loss.
    """
    self.X = X
    self.y = y
    self.l = max(0.0, float(l))

    # When no kernel is supplied we own the default one and must reset
    # its cache together with this object; a caller-supplied kernel is
    # left untouched.
    self._reset_kernel = kernel is None
    if self._reset_kernel:
        # Imported lazily to avoid a circular import at module load.
        from parsimony.algorithms.utils import LinearKernel
        kernel = LinearKernel(X=self.X, use_cache=True)
    self.kernel = kernel

    self.penalty_start = max(0, int(penalty_start))
    self.mean = bool(mean)

    self.reset()