class LassoLarsImpl():
    def __init__(self, alpha=1.0, fit_intercept=True, verbose=False,
                 normalize=True, precompute='auto', max_iter=500,
                 eps=2.220446049250313e-16, copy_X=True, fit_path=True,
                 positive=False):
        self._hyperparams = {
            'alpha': alpha,
            'fit_intercept': fit_intercept,
            'verbose': verbose,
            'normalize': normalize,
            'precompute': precompute,
            'max_iter': max_iter,
            'eps': eps,
            'copy_X': copy_X,
            'fit_path': fit_path,
            'positive': positive}
        self._wrapped_model = Op(**self._hyperparams)

    def fit(self, X, y=None):
        if y is not None:
            self._wrapped_model.fit(X, y)
        else:
            self._wrapped_model.fit(X)
        return self

    def predict(self, X):
        return self._wrapped_model.predict(X)
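# A minimal usage sketch for LassoLarsImpl above, assuming `Op` is bound to
# sklearn.linear_model.LassoLars (as the hyperparameter names suggest) and a
# scikit-learn release that still accepts the `normalize` argument; the toy
# data is illustrative only.
import numpy as np
from sklearn.linear_model import LassoLars as Op  # assumed binding for Op

X = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]])
y = np.array([0.0, 1.0, 2.0])

model = LassoLarsImpl(alpha=0.01).fit(X, y)
print(model.predict(np.array([[1.5, 1.5]])))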
def connectWidgets(self):
    # LassoLARS
    ll = LassoLars()
    self.alpha_text.setText(str(ll.alpha))
    self.fit_interceptCheckBox.setChecked(ll.fit_intercept)
    self.verboseCheckBox.setChecked(ll.verbose)
    self.normalizeCheckBox.setChecked(ll.normalize)
    self.setComboBox(self.precomputeComboBox, ['True', 'False', 'auto', 'array-like'])
    self.defaultComboItem(self.precomputeComboBox, ll.precompute)
    self.max_iterSpinBox.setValue(ll.max_iter)
    self.copy_XCheckBox.setChecked(ll.copy_X)
    self.fit_pathCheckBox.setChecked(ll.fit_path)
    self.positiveCheckBox.setChecked(ll.positive)

    # LassoLarsCV
    llcv = LassoLarsCV()
    self.max_n_alphasSpinBox.setValue(llcv.max_n_alphas)
    self.n_jobsSpinBox.setValue(llcv.n_jobs)

    # LassoLarsIC
    llic = LassoLarsIC()
    self.cvSpinBox.setValue(3)
    self.setComboBox(self.criterionComboBox, ['aic', 'bic'])
    self.defaultComboItem(self.criterionComboBox, llic.criterion)
def fit(self, X, y=None):
    self._sklearn_model = SKLModel(**self._hyperparams)
    if y is not None:
        self._sklearn_model.fit(X, y)
    else:
        self._sklearn_model.fit(X)
    return self
def connectWidgets(self):
    # LassoLARS
    ll = LassoLars()
    self.alpha_text.setText(str(ll.alpha))
    self.fit_interceptCheckBox.setChecked(ll.fit_intercept)
    self.normalizeCheckBox.setChecked(ll.normalize)
    self.max_iterSpinBox.setValue(ll.max_iter)
    self.positiveCheckBox.setChecked(ll.positive)
def connectWidgets(self):
    # LassoLARS
    ll = LassoLars()
    self.minalpha_spin.setValue(0.0000001)
    self.maxalpha_spin.setValue(0.01)
    self.nalpha_spin.setValue(100)
    self.fit_intercept_list.setCurrentItem(
        self.fit_intercept_list.findItems(str(ll.fit_intercept), QtCore.Qt.MatchExactly)[0])
    self.normalize_list.setCurrentItem(
        self.normalize_list.findItems(str(ll.normalize), QtCore.Qt.MatchExactly)[0])
    self.max_iterLineEdit.setText(str(ll.max_iter))
    self.force_positive_list.setCurrentItem(
        self.force_positive_list.findItems(str(ll.positive), QtCore.Qt.MatchExactly)[0])
def function(self):
    model = self.modelComboBox.currentIndex()
    if model == 0:
        params = {
            'alpha': self.alpha_text.text(),
            'fit_intercept': self.fit_interceptCheckBox.isChecked(),
            'verbose': self.verboseCheckBox.isChecked(),
            'normalize': self.normalizeCheckBox.isChecked(),
            'precompute': self.precomputeComboBox.currentText(),
            'max_iter': self.max_iterSpinBox.value(),
            'copy_X': self.copy_XCheckBox.isChecked(),
            'fit_path': self.fit_pathCheckBox.isChecked(),
            'positive': self.positiveCheckBox.isChecked(),
            'model': model
        }
        params_check = dict(params)
        params_check.pop('model')
        return params, self.getChangedValues(params_check, LassoLars())
    elif model == 1:
        params = {
            'fit_intercept': self.fit_interceptCheckBox.isChecked(),
            'verbose': self.verboseCheckBox.isChecked(),
            'max_iter': self.max_iterSpinBox.value(),
            'normalize': self.normalizeCheckBox.isChecked(),
            'precompute': self.precomputeComboBox.currentText(),
            'cv': self.cvSpinBox.value(),
            'max_n_alphas': self.max_n_alphasSpinBox.value(),
            'n_jobs': self.n_jobsSpinBox.value(),
            'copy_X': self.copy_XCheckBox.isChecked(),
            'positive': self.positiveCheckBox.isChecked(),
            'model': model
        }
        params_check = dict(params)
        params_check.pop('model')
        return params, self.getChangedValues(params_check, LassoLarsCV())
    elif model == 2:
        params = {
            'criterion': self.criterionComboBox.currentText(),
            'fit_intercept': self.fit_interceptCheckBox.isChecked(),
            'verbose': self.verboseCheckBox.isChecked(),
            'normalize': self.normalizeCheckBox.isChecked(),
            'precompute': self.precomputeComboBox.currentText(),
            'max_iter': self.max_iterSpinBox.value(),
            'copy_X': self.copy_XCheckBox.isChecked(),
            'positive': self.positiveCheckBox.isChecked(),
            'model': model
        }
        params_check = dict(params)
        params_check.pop('model')
        return params, self.getChangedValues(params_check, LassoLarsIC())
    else:
        params = {}
        print("Error")
def __init__(self, alpha=1.0, fit_intercept=True, verbose=False,
             normalize=True, precompute='auto', max_iter=500,
             eps=2.220446049250313e-16, copy_X=True, fit_path=True,
             positive=False):
    self._hyperparams = {
        'alpha': alpha,
        'fit_intercept': fit_intercept,
        'verbose': verbose,
        'normalize': normalize,
        'precompute': precompute,
        'max_iter': max_iter,
        'eps': eps,
        'copy_X': copy_X,
        'fit_path': fit_path,
        'positive': positive}
    self._wrapped_model = Op(**self._hyperparams)
def run(self):
    params = {
        'alpha': self.alpha_text.text(),
        'fit_intercept': self.fit_interceptCheckBox.isChecked(),
        'verbose': False,
        'normalize': self.normalizeCheckBox.isChecked(),
        'precompute': True,
        'max_iter': self.max_iterSpinBox.value(),
        'copy_X': True,
        'fit_path': True,
        'positive': self.positiveCheckBox.isChecked()
    }
    params_check = dict(params)
    return params, self.getChangedValues(params_check, LassoLars())
def set_learning_method(config, X_train, y_train):
    """
    Instantiates the sklearn's class corresponding to the value set in the
    configuration file for running the learning method.

    TODO: use reflection to instantiate the classes

    @param config: configuration object
    @return: an estimator with fit() and predict() methods
    """
    estimator = None

    learning_cfg = config.get("learning", None)
    if learning_cfg:
        p = learning_cfg.get("parameters", None)
        o = learning_cfg.get("optimize", None)
        scorers = \
            set_scorer_functions(learning_cfg.get("scorer", ['mae', 'rmse']))

        method_name = learning_cfg.get("method", None)
        if method_name == "SVR":
            if o:
                tune_params = set_optimization_params(o)
                estimator = optimize_model(SVR(), X_train, y_train,
                                           tune_params,
                                           scorers,
                                           o.get("cv", 5),
                                           o.get("verbose", True),
                                           o.get("n_jobs", 1))
            elif p:
                estimator = SVR(C=p.get("C", 10),
                                epsilon=p.get('epsilon', 0.01),
                                kernel=p.get('kernel', 'rbf'),
                                degree=p.get('degree', 3),
                                gamma=p.get('gamma', 0.0034),
                                tol=p.get('tol', 1e-3),
                                verbose=False)
            else:
                estimator = SVR()

        elif method_name == "RandomForestRegressor":
            if o:
                tune_params = set_optimization_params(o)
                print(tune_params)
                estimator = optimize_model(RandomForestRegressor(), X_train, y_train,
                                           tune_params,
                                           scorers,
                                           o.get("cv", 5),
                                           o.get("verbose", True),
                                           o.get("n_jobs", 1))
            elif p:
                estimator = RandomForestRegressor(
                    n_estimators=p.get("n_estimators", 100),
                    criterion=p.get("criterion", 'mse'),
                    n_jobs=p.get("n_jobs", -1),
                    random_state=p.get("random_state", 0),
                    max_features=p.get("max_features", 'auto'))

        elif method_name == "SVC":
            if o:
                tune_params = set_optimization_params(o)
                estimator = optimize_model(SVC(), X_train, y_train,
                                           tune_params,
                                           scorers,
                                           o.get('cv', 5),
                                           o.get('verbose', True),
                                           o.get('n_jobs', 1))
            elif p:
                estimator = SVC(C=p.get('C', 1.0),
                                kernel=p.get('kernel', 'rbf'),
                                degree=p.get('degree', 3),
                                gamma=p.get('gamma', 0.0),
                                coef0=p.get('coef0', 0.0),
                                tol=p.get('tol', 1e-3),
                                verbose=p.get('verbose', False))
            else:
                estimator = SVC()

        elif method_name == "LassoCV":
            if p:
                estimator = LassoCV(eps=p.get('eps', 1e-3),
                                    n_alphas=p.get('n_alphas', 100),
                                    normalize=p.get('normalize', False),
                                    precompute=p.get('precompute', 'auto'),
                                    max_iter=p.get('max_iter', 1000),
                                    tol=p.get('tol', 1e-4),
                                    cv=p.get('cv', 10),
                                    verbose=False)
            else:
                estimator = LassoCV()

        elif method_name == "LassoLars":
            if o:
                tune_params = set_optimization_params(o)
                estimator = optimize_model(LassoLars(), X_train, y_train,
                                           tune_params,
                                           scorers,
                                           o.get("cv", 5),
                                           o.get("verbose", True),
                                           o.get("n_jobs", 1))
            if p:
                estimator = LassoLars(alpha=p.get('alpha', 1.0),
                                      fit_intercept=p.get('fit_intercept', True),
                                      verbose=p.get('verbose', False),
                                      normalize=p.get('normalize', True),
                                      max_iter=p.get('max_iter', 500),
                                      fit_path=p.get('fit_path', True))
            else:
                estimator = LassoLars()

        elif method_name == "LassoLarsCV":
            if p:
                estimator = LassoLarsCV(max_iter=p.get('max_iter', 500),
                                        normalize=p.get('normalize', True),
                                        max_n_alphas=p.get('max_n_alphas', 1000),
                                        n_jobs=p.get('n_jobs', 1),
                                        cv=p.get('cv', 10),
                                        verbose=False)
            else:
                estimator = LassoLarsCV()

    return estimator, scorers
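# A minimal sketch of a configuration object that set_learning_method() above
# can consume. Only the key names ("learning", "method", "scorer",
# "parameters", "optimize") come from the code above; the concrete values
# shown here are illustrative assumptions.
example_config = {
    "learning": {
        "method": "LassoLars",
        "scorer": ['mae', 'rmse'],
        "parameters": {
            "alpha": 0.1,
            "fit_intercept": True,
            "normalize": True,
            "max_iter": 500,
        },
    },
}
# estimator, scorers = set_learning_method(example_config, X_train, y_train)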
'Isomap': Isomap(),
'KMeans': KMeans(),
'KNeighborsClassifier': KNeighborsClassifier(),
'KNeighborsRegressor': KNeighborsRegressor(),
'KernelCenterer': KernelCenterer(),
'KernelDensity': KernelDensity(),
'KernelPCA': KernelPCA(),
'KernelRidge': KernelRidge(),
'LSHForest': LSHForest(),
'LabelPropagation': LabelPropagation(),
'LabelSpreading': LabelSpreading(),
'Lars': Lars(),
'LarsCV': LarsCV(),
'Lasso': Lasso(),
'LassoCV': LassoCV(),
'LassoLars': LassoLars(),
'LassoLarsCV': LassoLarsCV(),
'LassoLarsIC': LassoLarsIC(),
'LatentDirichletAllocation': LatentDirichletAllocation(),
'LedoitWolf': LedoitWolf(),
'LinearDiscriminantAnalysis': LinearDiscriminantAnalysis(),
'LinearRegression': LinearRegression(),
'LinearSVC': LinearSVC(),
'LinearSVR': LinearSVR(),
'LocallyLinearEmbedding': LocallyLinearEmbedding(),
'LogisticRegression': LogisticRegression(),
'LogisticRegressionCV': LogisticRegressionCV(),
'MDS': MDS(),
'MLPClassifier': MLPClassifier(),
'MLPRegressor': MLPRegressor(),
'MaxAbsScaler': MaxAbsScaler(),
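# A minimal sketch of how a name-to-estimator mapping like the entries above
# is typically used. The variable name `estimators` and the helper
# make_estimator() are assumptions, since the enclosing dict is not named in
# this excerpt.
def make_estimator(name, estimators):
    # Return a fresh, unfitted copy of the registered estimator.
    from sklearn.base import clone
    return clone(estimators[name])

# model = make_estimator('LassoLars', estimators)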
def _sparse_encode(X, dictionary, gram, cov=None, algorithm='lasso_lars',
                   regularization=None, copy_cov=True, init=None, max_iter=1000):
    """Generic sparse coding

    Each column of the result is the solution to a Lasso problem.

    Parameters
    ----------
    X: array of shape (n_samples, n_features)
        Data matrix.

    dictionary: array of shape (n_components, n_features)
        The dictionary matrix against which to solve the sparse coding of
        the data. Some of the algorithms assume normalized rows.

    gram: None | array, shape=(n_components, n_components)
        Precomputed Gram matrix, dictionary * dictionary'.
        gram can be None if method is 'threshold'.

    cov: array, shape=(n_components, n_samples)
        Precomputed covariance, dictionary * X'.

    algorithm: {'lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'}
        lars: uses the least angle regression method (linear_model.lars_path)
        lasso_lars: uses Lars to compute the Lasso solution
        lasso_cd: uses the coordinate descent method to compute the Lasso
            solution (linear_model.Lasso). lasso_lars will be faster if the
            estimated components are sparse.
        omp: uses orthogonal matching pursuit to estimate the sparse solution
        threshold: squashes to zero all coefficients less than regularization
            from the projection dictionary * data'

    regularization : int | float
        The regularization parameter. It corresponds to alpha when algorithm
        is 'lasso_lars', 'lasso_cd' or 'threshold'. Otherwise it corresponds
        to n_nonzero_coefs.

    init: array of shape (n_samples, n_components)
        Initialization value of the sparse code. Only used if
        `algorithm='lasso_cd'`.

    max_iter: int, 1000 by default
        Maximum number of iterations to perform if `algorithm='lasso_cd'`.

    copy_cov: boolean, optional
        Whether to copy the precomputed covariance matrix; if False, it may
        be overwritten.

    Returns
    -------
    code: array of shape (n_components, n_features)
        The sparse codes

    See also
    --------
    sklearn.linear_model.lars_path
    sklearn.linear_model.orthogonal_mp
    sklearn.linear_model.Lasso
    SparseCoder
    """
    if X.ndim == 1:
        X = X[:, np.newaxis]
    n_samples, n_features = X.shape
    if cov is None and algorithm != 'lasso_cd':
        # overwriting cov is safe
        copy_cov = False
        cov = np.dot(dictionary, X.T)

    if algorithm == 'lasso_lars':
        alpha = float(regularization) / n_features  # account for scaling
        try:
            err_mgt = np.seterr(all='ignore')
            lasso_lars = LassoLars(alpha=alpha, fit_intercept=False,
                                   verbose=False, normalize=False,
                                   precompute=gram, fit_path=False)
            lasso_lars.fit(dictionary.T, X.T, Xy=cov)
            new_code = lasso_lars.coef_
        finally:
            np.seterr(**err_mgt)

    elif algorithm == 'lasso_cd':
        alpha = float(regularization) / n_features  # account for scaling
        clf = Lasso(alpha=alpha, fit_intercept=False, precompute=None,
                    max_iter=max_iter, positive=True)
        clf.fit(dictionary.T, X.T, Xy=cov, coef_init=init)
        new_code = clf.coef_

    elif algorithm == 'lars':
        print("lars not fit this method")

    elif algorithm == 'threshold':
        new_code = ((np.sign(cov) *
                     np.maximum(np.abs(cov) - regularization, 0)).T)

    elif algorithm == 'omp':
        norms_squared = np.sum((X ** 2), axis=1)
        new_code = orthogonal_mp_gram(gram, cov, regularization, None,
                                      norms_squared, copy_Xy=copy_cov).T
    else:
        raise ValueError('Sparse coding method must be "lasso_lars" '
                         '"lasso_cd", "lasso", "threshold" or "omp", got %s.'
                         % algorithm)
    return new_code
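# A minimal sketch of calling _sparse_encode() above on random data. The
# shapes follow the docstring (dictionary is (n_components, n_features),
# X is (n_samples, n_features)); the regularization value is an illustrative
# assumption, and the snippet targets the same older scikit-learn releases
# the function above does (where LassoLars.fit still accepts Xy).
import numpy as np

rng = np.random.RandomState(0)
n_samples, n_features, n_components = 20, 8, 5
X = rng.randn(n_samples, n_features)
dictionary = rng.randn(n_components, n_features)
gram = np.dot(dictionary, dictionary.T)  # precomputed Gram matrix

code = _sparse_encode(X, dictionary, gram,
                      algorithm='lasso_lars', regularization=0.5)
print(code.shape)  # one row of sparse coefficients per sample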