def test_knorae_subspaces():
    rng = np.random.RandomState(123456)
    # Split the data into training, DSEL and test partitions
    X_dsel, X_test, X_train, y_dsel, y_test, y_train = load_dataset(None, rng)
    # Train a pool of logistic regressions, each on a random feature subspace
    pool = BaggingClassifier(LogisticRegression(),
                             max_features=0.5,
                             random_state=rng).fit(X_train, y_train)
    knorae = KNORAE(pool)
    knorae.fit(X_dsel, y_dsel)
    assert np.isclose(knorae.score(X_test, y_test), 0.9787234042553191)
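# The test above assumes module-level imports and a `load_dataset` helper that
# are not shown in this fragment. Below is a minimal sketch of such a helper,
# assuming scikit-learn's breast cancer data and a train/DSEL/test split; the
# real fixture may differ, so the exact accuracy asserted above is not
# guaranteed to be reproduced with this sketch.
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.ensemble import BaggingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

from deslib.des.knora_e import KNORAE


def load_dataset(encode_labels, rng):
    # Hypothetical helper: load a binary dataset and split it into
    # training, DSEL (dynamic selection) and test partitions.
    data = load_breast_cancer()
    X_train, X_test, y_train, y_test = train_test_split(
        data.data, data.target, test_size=0.33, random_state=rng)
    X_train, X_dsel, y_train, y_dsel = train_test_split(
        X_train, y_train, test_size=0.5, random_state=rng)
    return X_dsel, X_test, X_train, y_dsel, y_test, y_train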
# Stacking (stacked generalization) techniques can
# be found in the static module. In this experiment we consider two types
# of stacking: one using logistic regression as meta-classifier
# (default configuration) and the other using a Decision Tree.
stacked_lr = StackedClassifier(pool_classifiers, random_state=rng)
stacked_dt = StackedClassifier(pool_classifiers,
                               random_state=rng,
                               meta_classifier=DecisionTreeClassifier())

# Fitting the DS techniques
knorau.fit(X_dsel, y_dsel)
kne.fit(X_dsel, y_dsel)
desp.fit(X_dsel, y_dsel)
metades.fit(X_dsel, y_dsel)
ola.fit(X_dsel, y_dsel)
mcb.fit(X_dsel, y_dsel)

# Fitting the stacking models
stacked_lr.fit(X_dsel, y_dsel)
stacked_dt.fit(X_dsel, y_dsel)

# Calculate the classification accuracy of each technique
print('Evaluating DS techniques:')
print('Classification accuracy of Majority voting of the pool: ',
      model_voting.score(X_test, y_test))
print('Classification accuracy of KNORA-U: ', knorau.score(X_test, y_test))
print('Classification accuracy of KNORA-E: ', kne.score(X_test, y_test))
print('Classification accuracy of DESP: ', desp.score(X_test, y_test))
print('Classification accuracy of META-DES: ', metades.score(X_test, y_test))
print('Classification accuracy of OLA: ', ola.score(X_test, y_test))
print('Classification accuracy of MCB: ', mcb.score(X_test, y_test))
print('Classification accuracy of Stacking LR: ',
      stacked_lr.score(X_test, y_test))
print('Classification accuracy of Stacking DT: ',
      stacked_dt.score(X_test, y_test))
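# The comparison above relies on a shared pool of classifiers and on DS models
# (knorau, kne, desp, metades, ola, mcb) plus a voting baseline that are
# created earlier in the experiment. The sketch below shows one way that setup
# could look; the pool composition, hyperparameters and the choice of the
# bagging ensemble as the "majority voting" baseline are assumptions, not the
# original experiment's exact configuration.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import BaggingClassifier
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

from deslib.dcs.mcb import MCB
from deslib.dcs.ola import OLA
from deslib.des.des_p import DESP
from deslib.des.knora_e import KNORAE
from deslib.des.knora_u import KNORAU
from deslib.des.meta_des import METADES
from deslib.static.stacked import StackedClassifier

rng = np.random.RandomState(42)
X, y = make_classification(n_samples=2000, random_state=rng)
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.5, random_state=rng)
X_train, X_dsel, y_train, y_dsel = train_test_split(
    X_train, y_train, test_size=0.5, random_state=rng)

# Pool of shallow decision trees shared by the DS techniques and the stacking
# models; trees expose predict_proba, which META-DES requires.
pool_classifiers = BaggingClassifier(DecisionTreeClassifier(max_depth=3),
                                     n_estimators=10,
                                     random_state=rng).fit(X_train, y_train)

# The fitted bagging ensemble stands in for the majority-voting baseline
# (note: BaggingClassifier averages class probabilities when available,
# which is close to, but not exactly, hard majority voting).
model_voting = pool_classifiers

# DS techniques referenced in the experiment above
knorau = KNORAU(pool_classifiers, random_state=rng)
kne = KNORAE(pool_classifiers, random_state=rng)
desp = DESP(pool_classifiers, random_state=rng)
metades = METADES(pool_classifiers, random_state=rng)
ola = OLA(pool_classifiers, random_state=rng)
mcb = MCB(pool_classifiers, random_state=rng)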
def test_kne(knn_methods, voting):
    pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()

    kne = KNORAE(pool_classifiers, knn_classifier=knn_methods, voting=voting)
    kne.fit(X_dsel, y_dsel)
    assert np.isclose(kne.score(X_test, y_test), 0.9787234042553191)
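# The test above depends on a `setup_classifiers` helper and on pytest
# parametrization of the `knn_methods` and `voting` arguments, neither of
# which appear in this fragment. A rough sketch follows; the helper's
# internals and the parameter values are assumptions about the test suite,
# not its actual configuration.
import numpy as np
from sklearn.calibration import CalibratedClassifierCV
from sklearn.datasets import load_breast_cancer
from sklearn.ensemble import BaggingClassifier
from sklearn.linear_model import Perceptron
from sklearn.model_selection import train_test_split

from deslib.des.knora_e import KNORAE


def setup_classifiers():
    # Hypothetical helper: a bagging pool of calibrated Perceptrons
    # (calibration provides predict_proba, which soft voting needs),
    # plus DSEL and test partitions.
    rng = np.random.RandomState(123456)
    data = load_breast_cancer()
    X_train, X_test, y_train, y_test = train_test_split(
        data.data, data.target, test_size=0.33, random_state=rng)
    X_train, X_dsel, y_train, y_dsel = train_test_split(
        X_train, y_train, test_size=0.5, random_state=rng)
    base = CalibratedClassifierCV(Perceptron(max_iter=100, random_state=rng))
    pool = BaggingClassifier(base, n_estimators=10,
                             random_state=rng).fit(X_train, y_train)
    return pool, X_dsel, y_dsel, X_test, y_test


# The test itself would then be parametrized along these lines:
# @pytest.mark.parametrize('knn_methods', ['knn'])
# @pytest.mark.parametrize('voting', ['hard', 'soft'])
# def test_kne(knn_methods, voting): ...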
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

from deslib.des.knora_e import KNORAE
from deslib.des.meta_des import METADES

# Setting up the random state to have consistent results
rng = np.random.RandomState(42)

# Generate a classification dataset
X, y = make_classification(n_samples=1000, random_state=rng)

# Split the data into training and test data
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33,
                                                    random_state=rng)

# Split the training data again into training and DSEL for the DS techniques
X_train, X_dsel, y_train, y_dsel = train_test_split(X_train, y_train,
                                                    test_size=0.5,
                                                    random_state=rng)

# Initialize the DS techniques. DS methods can be initialized without
# specifying a single input parameter. In this example, we just pass the
# random state in order to always have the same result.
kne = KNORAE(random_state=rng)
meta = METADES(random_state=rng)

# Fitting the DS techniques
kne.fit(X_dsel, y_dsel)
meta.fit(X_dsel, y_dsel)

# Calculate the classification accuracy of each technique
print('Evaluating DS techniques:')
print('Classification accuracy KNORA-Eliminate: ', kne.score(X_test, y_test))
print('Classification accuracy META-DES: ', meta.score(X_test, y_test))
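# Since DESlib estimators follow the scikit-learn API, the fitted techniques
# can also label individual samples directly. A small usage sketch, reusing
# the variables defined in the example above:
y_pred_kne = kne.predict(X_test[:5])
y_pred_meta = meta.predict(X_test[:5])
print('KNORA-Eliminate predictions: ', y_pred_kne)
print('META-DES predictions:        ', y_pred_meta)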