def test_selects_all():
    from sklearn.neighbors import KNeighborsClassifier
    from mlxtend.data import wine_data
    # SFFS (sequential floating forward selection) is assumed to be imported at the
    # top of the original test module from mlxtend's feature-selection package.

    X, y = wine_data()
    knn = KNeighborsClassifier(n_neighbors=4)
    sffs = SFFS(knn, k_features=13, scoring='accuracy', cv=3, print_progress=False)
    sffs.fit(X, y)
    assert len(sffs.indices_) == 13
def test_selects_all():
    from sklearn.neighbors import KNeighborsClassifier
    from mlxtend.data import wine_data
    # SFBS (sequential floating backward selection) is assumed to be imported at the
    # top of the original test module from mlxtend's feature-selection package.

    X, y = wine_data()
    knn = KNeighborsClassifier(n_neighbors=4)
    sfbs = SFBS(knn, k_features=13, scoring='accuracy', cv=3, print_progress=False)
    sfbs.fit(X, y)
    assert len(sfbs.indices_) == 13
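
Note: the standalone SFFS/SFBS classes used above belong to an older mlxtend API. In current mlxtend releases the same floating selection strategies are exposed through SequentialFeatureSelector; a minimal sketch of the equivalent calls, assuming a recent mlxtend version, looks like this:

from sklearn.neighbors import KNeighborsClassifier
from mlxtend.data import wine_data
from mlxtend.feature_selection import SequentialFeatureSelector as SFS

X, y = wine_data()
knn = KNeighborsClassifier(n_neighbors=4)

# floating forward selection (SFFS equivalent)
sffs = SFS(knn, k_features=13, forward=True, floating=True,
           scoring='accuracy', cv=3)
sffs.fit(X, y)
assert len(sffs.k_feature_idx_) == 13

# floating backward selection (SFBS equivalent)
sfbs = SFS(knn, k_features=13, forward=False, floating=True,
           scoring='accuracy', cv=3)
sfbs.fit(X, y)
assert len(sfbs.k_feature_idx_) == 13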
Example #3
def winePipeline(desired_lbl):
    """
    Slice the wine data down to two classes and binarize their labels.

    input:
        1. a list of exactly two of the wine class labels (0, 1 or 2)
    output:
        1. nd.array of the sliced data concatenated with the binarized labels
    """
    # numpy (np), wine_data, normalizeByMax and binarizeLabels are assumed to be
    # imported/defined elsewhere in the original module.
    assert len(desired_lbl) == 2, "should get only 2 labels"
    data, lbl = wine_data()
    data = normalizeByMax(data)
    # boolean mask of samples belonging to either of the two requested classes
    sliced_lbl = np.logical_or(lbl == desired_lbl[0], lbl == desired_lbl[1])
    sliced_data = data[sliced_lbl, :]
    sliced_lbl = lbl[sliced_lbl]
    binary_lbl = binarizeLabels(sliced_lbl)
    # append the binary labels as the last column of the data matrix
    return np.append(sliced_data,
                     binary_lbl.reshape(binary_lbl.shape[0], 1),
                     axis=1)
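
winePipeline relies on two helpers, normalizeByMax and binarizeLabels, that are not shown in this example. Their real implementations are not part of the source; a minimal sketch of what they plausibly do, assuming per-column max scaling and a 0/1 re-encoding of the two remaining class labels, is:

import numpy as np

def normalizeByMax(data):
    # scale every feature column to [0, 1] by dividing by its column maximum
    # (assumed behaviour; the original helper is not shown)
    return data / data.max(axis=0)

def binarizeLabels(lbl):
    # map the two remaining class labels to 0 and 1
    # (assumed behaviour; the original helper is not shown)
    return (lbl == lbl.max()).astype(int)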
Example #4
def test_import_wine_data():
    from mlxtend.data import wine_data

    X, y = wine_data()

    assert X.shape[0] == 178
    assert X.shape[1] == 13
    assert y.shape[0] == 178
Example #5
from mlxtend.data import wine_data
from mlxtend.plotting import plot_decision_regions
from mlxtend.classifier import Perceptron
import matplotlib.pyplot as plt

# Loading Data

X, y = wine_data()
X = X[:, [10, 2]]  # hue, ash (columns 10 and 2 in the UCI wine feature order)
X = X[0:100]       # first 100 samples: class 0 and class 1 only
y = y[0:100]

# standardize both features (zero mean, unit variance)
X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()


# Rosenblatt Perceptron

ppn = Perceptron(epochs=500,
                 eta=0.05,
                 random_seed=1,
                 print_progress=3)
ppn.fit(X, y)


plot_decision_regions(X, y, clf=ppn)
plt.title('Wine Data - Hue and Ash 500 Epochs')
plt.show()

print('Bias & Weights: %s' % ppn.w_)
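
A quick sanity check that often accompanies this kind of decision-region plot is the training accuracy of the fitted perceptron; a minimal sketch, reusing the ppn, X and y from above:

import numpy as np

# fraction of training samples the fitted perceptron classifies correctly
train_acc = np.mean(ppn.predict(X) == y)
print('Training accuracy: %.2f' % train_acc)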
Example #6
def test_import_wine_data():
    from mlxtend.data import wine_data

    X, y = wine_data()
    assert X.shape[0] == 178
    assert X.shape[1] == 13
    print(y.shape)
    assert y.shape[0] == 178