Esempio n. 1
0
def decision_boundary(model, features, classes, X_train, Y_train, X_test,
                      Y_test):
    """Plot a 2D decision-boundary visualization for *model*.

    Parameters
    ----------
    model : estimator
        scikit-learn style classifier to visualize.
    features : list of str
        Names of the two features used for the 2D plot axes.
    classes : list of str
        Class labels shown in the legend.
    X_train, Y_train : array-like
        Data the visualizer is fitted on.
    X_test, Y_test : array-like
        Data scattered on top of the decision regions.
    """
    # Imported lazily so the module imports even without yellowbrick installed.
    from yellowbrick.contrib.classifier import DecisionViz

    # BUG FIX: the original immediately overwrote the `features` argument
    # with a hard-coded ['name_sim', 'add_sim'] list, silently ignoring
    # whatever the caller passed in.
    viz = DecisionViz(model,
                      title="random forest",
                      features=features,
                      classes=classes)
    viz.fit(X_train, Y_train)
    viz.draw(X_test, Y_test)
    # poof() is the deprecated alias in modern yellowbrick; show() replaces it.
    viz.show()
Esempio n. 2
0
def decision():
    """Render a KNN (k=3) decision-boundary figure for the two-moons dataset."""
    # Generate and standardize a noisy two-moons sample.
    data, target = make_moons(noise=0.3)
    data = StandardScaler().fit_transform(data)
    train_X, test_X, train_y, test_y = tts(data, target, test_size=0.20)

    # Fit the visualizer on the training split, then overlay the test points.
    visualizer = DecisionViz(KNeighborsClassifier(3), ax=newfig())
    visualizer.fit(train_X, train_y)
    visualizer.draw(test_X, test_y)
    savefig(visualizer, "decision_boundaries")
def draw_boundaries():
    """Fit Gaussian naive Bayes on two iris features and plot its decision regions."""
    iris = datasets.load_iris()
    # Keep only the first two features so the boundary is drawable in 2D.
    samples = iris.data[:, :2]
    targets = np.array(iris.target, dtype=int)  # class indices as ints

    # Standardize to zero mean / unit variance before plotting.
    samples = StandardScaler().fit_transform(samples)

    visualizer = DecisionViz(
        GaussianNB(),
        title="Gaussian",
        features=['Sepal Length', 'Sepal Width'],
        classes=['A', 'B', 'C'],  # axis labels and the three iris classes
    )
    visualizer.fit(samples, targets)   # train, establishing the mesh
    visualizer.draw(samples, targets)  # scatter the points over the regions
    visualizer.show()
Esempio n. 4
0
def lab3_1():
    """Train an SVM on two iris features and visualize its decision boundary.

    Loads iris, keeps sepal length (col 0) and petal length (col 2),
    standardizes them, fits an SVC, scatters train/test points, and
    overlays the model's decision regions with yellowbrick's DecisionViz.
    """
    data = datasets.load_iris()
    X = data.data[:, [0, 2]]  # sepal length and petal length
    y = data.target
    X = StandardScaler().fit_transform(X, y)

    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        train_size=0.8,
                                                        random_state=42,
                                                        stratify=y)

    classifier = svm.SVC()
    classifier.fit(X_train, y_train)

    # BUG FIX: iris has three classes; the original two-color map left the
    # third class sharing a color with another.
    cm_bright = ListedColormap(['#FF0000', '#0000FF', '#00FF00'])
    ax = plt.subplot()
    ax.scatter(X_train[:, 0],
               X_train[:, 1],
               c=y_train,
               cmap=cm_bright,
               edgecolors='k')

    # Test points drawn semi-transparent to distinguish them from train points.
    ax.scatter(X_test[:, 0],
               X_test[:, 1],
               c=y_test,
               cmap=cm_bright,
               edgecolors='k',
               alpha=0.6)

    # BUG FIX: the original passed X[:2], y[:2] positionally to DecisionViz,
    # where they landed in unrelated parameter slots; it also used
    # feature_names[:2] (sepal length/width) although the plotted columns are
    # [0, 2] (sepal length / petal length), and truncated classes to two of
    # the three iris classes.
    viz = DecisionViz(classifier,
                      features=[data.feature_names[i] for i in (0, 2)],
                      classes=list(data.target_names))
    viz.fit(X_train, y_train)
    # BUG FIX: fit_draw_show() would re-fit on the test split, discarding the
    # training fit above; draw + show keeps the train-fitted boundary.
    viz.draw(X_test, y_test)
    viz.show()

    plt.xlabel('sepal length [cm]')
    plt.ylabel('petal length [cm]')
    plt.title('SVM on Iris')
    plt.show()
Esempio n. 5
0
                           n_clusters_per_class=1)
# NOTE(review): the indented line above is the tail of a call (presumably
# sklearn's make_classification) whose opening lines are outside this chunk
# — confirm against the full file.

# Add uniform noise so the synthetic classes are no longer perfectly separable.
rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)
linearly_separable = (X, y)

# Two-moons dataset: a classic non-linearly-separable binary problem.
data_set = make_moons(noise=0.3, random_state=0)

X, y = data_set
X = StandardScaler().fit_transform(X)  # standardize before plotting
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=.4,
                                                    random_state=42)

# k-nearest-neighbors (k=3) decision regions, saved to disk via poof(outpath=...).
viz = DecisionViz(KNeighborsClassifier(3),
                  title="Nearest Neighbors",
                  features=['Feature One', 'Feature Two'],
                  classes=['A', 'B'])
viz.fit(X_train, y_train)
viz.draw(X_test, y_test)
viz.poof(outpath="images/knn_decisionviz.png")

# Linear-kernel SVM decision regions on the same train/test split.
viz = DecisionViz(SVC(kernel="linear", C=0.025),
                  title="Linear SVM",
                  features=['Feature One', 'Feature Two'],
                  classes=['A', 'B'])
viz.fit(X_train, y_train)
viz.draw(X_test, y_test)
viz.poof(outpath="images/svc_decisionviz.png")
Esempio n. 6
0
from sklearn.model_selection import train_test_split as tts
from sklearn.preprocessing import StandardScaler
from sklearn import datasets
import pandas as pd
from sklearn.neighbors import KNeighborsClassifier
from yellowbrick.contrib.classifier import DecisionViz

# Load iris and assemble a labelled DataFrame with named columns.
iris = datasets.load_iris()

labels = pd.DataFrame(iris.target)
labels.columns = ['labels']
data = pd.DataFrame(iris.data)
data.columns = ['Sepal length', 'Sepal width', 'Petal length', 'Petal width']
data = pd.concat([data, labels], axis=1)
# Keep only the two sepal features (DecisionViz plots in 2D) plus the label.
data_sp = data[['Sepal length', 'Sepal width', 'labels']]

# Standardize the two features, then make a 60/40 train/test split.
X = StandardScaler().fit_transform(data_sp.drop(['labels'], axis=1))
X_train, X_test, y_train, y_test = tts(X,
                                       data_sp['labels'],
                                       test_size=.4,
                                       random_state=42)

# KNN (k=3) decision-boundary plot over the standardized sepal features.
# NOTE(review): features are labelled generically, not with the sepal
# column names used above.
viz = DecisionViz(KNeighborsClassifier(3),
                  title="Nearest Neighbors",
                  features=['Feature One', 'Feature Two'],
                  classes=["0", "1", "2"])

viz.fit(X_train, y_train)
viz.draw(X_test, y_test)
viz.show()
Esempio n. 7
0
def _reset_pyplot():
    """Clear the current figure and axes and close the window between plots."""
    plt.clf()
    plt.cla()
    plt.close()


def execute_classification_code(code, session):
    """Run user-submitted classification code and render diagnostic plots.

    Decodes and exec()s the submitted code — which is expected to define
    ``df``, ``model``, ``X_train``/``X_test``/``y_train``/``y_test``,
    ``cols`` and ``y`` in module globals — then saves a classification
    report, a decision-boundary plot and a confusion matrix under
    ./plots/, pickles the trained model, and returns the path stems as JSON.

    SECURITY: ``exec`` on request-supplied code executes arbitrary Python
    with full process privileges. Never expose this to untrusted input
    without sandboxing.
    """
    global df, model, problem_class, order
    code_str = urllib.parse.unquote(code)
    code_arr = code_str.split("\n")
    print(code_arr)
    problem_class = code_arr[0]
    print(problem_class)
    order = code_arr[1]
    print(order)
    exec(code_arr[2])  # expected to define df
    print(df)
    # Expected to define model, the train/test splits, cols and y in
    # module globals — they are referenced below without local definition.
    exec(code_arr[3], globals())

    cmap_pink_green = sns.diverging_palette(352, 136, s=96, l=51, n=7)
    viz = ClassificationReport(model, cmap=cmap_pink_green)
    viz.fit(X_train, y_train)
    viz.score(X_test, y_test)
    viz.poof(outpath="./plots/classificationmatrix" + session + ".png")
    image_path_class = "classificationmatrix"

    _reset_pyplot()

    le = LabelEncoder()
    # BUG FIX: list.sort() sorts in place and returns None, so the original
    # passed classes=None here and to ConfusionMatrix below.
    class_names = sorted(map(str, y.iloc[:, 0].unique()))
    dec_viz = DecisionViz(model,
                          title="Decision Boundaries",
                          features=np.where(cols == True)[0].tolist(),
                          classes=class_names)
    dec_viz.fit(X_train.to_numpy(), le.fit_transform(y_train))
    # BUG FIX: use transform() so test labels share the encoding fitted on
    # the training labels instead of being re-fitted independently.
    dec_viz.draw(X_test.to_numpy(), le.transform(y_test))
    dec_viz.poof(outpath="./plots/decviz" + session + ".png")
    image_path_dec = "decviz"

    _reset_pyplot()

    print(list(map(str, y.iloc[:, 0].unique())))
    cmap_salmon_dijon = sns.diverging_palette(28, 65, s=98, l=78, n=7)
    cm = ConfusionMatrix(model,
                         classes=class_names,  # BUG FIX: was .sort() -> None
                         cmap=cmap_salmon_dijon)
    cm.fit(X_train, y_train)
    cm.score(X_test, y_test)
    plt.tight_layout()
    cm.poof(outpath="./plots/cm" + session + ".png")
    image_path_cm = "cm"

    _reset_pyplot()

    model.fit(X_train, y_train)

    file = 'pickled_models/trained_model' + session + '.sav'
    pickle_path = 'trained_model'
    # BUG FIX: close the file handle deterministically instead of leaking it.
    with open(file, 'wb') as fh:
        pickle.dump(model, fh)

    return jsonify(image_path_class, image_path_dec, image_path_cm,
                   pickle_path)