Example #1
def test(classes=2):
    """Compare GaussianBayesClassifier against sklearn's GaussianNB.

    Fits both models on 2-D blob data and plots the data distribution
    alongside each model's predictions on uniformly random query points.
    """
    data, labels = DataGenerator.blobs_2d(100, classes)
    labels = labels.ravel()

    figure = plt.figure(constrained_layout=True)
    grid = figure.add_gridspec(2, 2)

    n_queries = 1000
    axis = (0, 1, 0, 1)

    # Top-left panel: the raw training data colored by class.
    data_distr = add_plot_data_2d(figure, grid[0, 0], data, labels,
                                  "Data distribution", axis)

    xs = np.random.rand(n_queries, 2)

    bc = GaussianBayesClassifier()
    bc.fit(data, labels)
    res = add_plot_model_predictions(figure, grid[0, 1], xs, bc, "Predictions",
                                     axis, True)

    sk_bc = naive_bayes.GaussianNB()
    sk_bc.fit(data, labels)
    sk_res = add_plot_model_predictions(figure, grid[1, 1], xs, sk_bc,
                                        "sklearn predictions", axis, True)

    plt.show()
Example #2
def test():
    """Compare the local DecisionTreeClassifier with sklearn's on blob data.

    Fits both trees (max depth 10) on a train split of 3-class 2-D blobs,
    then plots the data distribution, each model's test-set predictions,
    and a ROC curve per model.
    """
    data, labels = DataGenerator.blobs_2d(1000, 3)
    trainX, trainY, testX, testY = split_data(data, labels)

    fig = plt.figure(constrained_layout=True)
    gs = fig.add_gridspec(2, 3)

    axis = (0, 1, 0, 1)

    dtc = DecisionTreeClassifier(10)
    dtc.fit(trainX, trainY)

    sk_dtc = DTClassifier(max_depth=10)
    sk_dtc.fit(trainX, trainY)

    data_distr = add_plot_data_2d(fig, gs[0, 0], data, labels,
                                  "Data distributions", axis)

    res = add_plot_model_predictions(fig, gs[0, 1], testX, dtc, "Predictions",
                                     axis, True)

    sk_res = add_plot_model_predictions(fig, gs[1, 1], testX, sk_dtc,
                                        "sklearn predictions", axis, True)

    # NOTE(review): ROC curves are built from hard class labels here;
    # predicted probabilities would normally give a smoother curve — confirm intent.
    add_plot_with_roc_curve(fig, gs[0, 2], testY, dtc.predict(testX))
    add_plot_with_roc_curve(fig, gs[1, 2], testY, sk_dtc.predict(testX))

    plt.show()
Example #3
def test(k_cnt, classes=2):
    """Compare KNNClassifier with sklearn's KNeighborsClassifier for several k.

    For each of ``k_cnt`` values of k (2, 4, ..., 2*k_cnt), fits both models
    on 2-D blob data and plots their predictions on uniformly random query
    points: local model on the top row, sklearn on the bottom row. The first
    column shows the raw data distribution.
    """
    data, labels = DataGenerator.blobs_2d(100, classes)
    labels = labels.ravel()

    fig = plt.figure(constrained_layout=True)
    gs = fig.add_gridspec(2, 1 + k_cnt)

    N = 1000
    axis = (0, 1, 0, 1)

    data_distr = add_plot_data_2d(fig, gs[0, 0], data, labels,
                                  "Data distribution", axis)

    xs = np.random.rand(N, 2)
    for i in range(k_cnt):
        k = (i + 1) * 2

        knn = KNNClassifier(k)
        knn.fit(data, labels)

        # Bug fix: the title previously omitted the actual value of k
        # (it was the bare string "Predictions with k = ").
        res = add_plot_model_predictions(fig, gs[0, i + 1], xs, knn,
                                         "Predictions with k = " + str(k),
                                         axis, True)

        sk_knn = KNeighborsClassifier(n_neighbors=k)
        sk_knn.fit(data, labels)

        # Bug fix: removed accidental chained assignment (sk_res = res = ...)
        # that silently overwrote `res`.
        sk_res = add_plot_model_predictions(fig, gs[1, i + 1], xs, sk_knn,
                                            "sklearn predictions with k = " + str(k),
                                            axis, True)

    plt.show()
Example #4
def test():
    """Compare a local LogisticRegression with sklearn's on 2-class blob data.

    Plots the data distribution, hard predictions from both models on random
    query points, their smooth (probability) predictions, and the training
    error history of the local model.
    """
    figure = plt.figure(constrained_layout=True)
    grid = figure.add_gridspec(2, 3)

    data, labels = DataGenerator.blobs_2d(100, 2)

    model = LogisticRegression(learning_rate=0.1, iterations=1000)
    model.fit(data, labels)

    sk_model = LGR()
    sk_model.fit(data, labels)

    n_queries = 5000
    xs = np.random.rand(n_queries, 2)
    axis = (0, 1, 0, 1)

    data_distrib = add_plot_data_2d(figure, grid[0, 0], data, labels,
                                    "Data distribution", axis)

    res = add_plot_model_predictions(figure, grid[0, 1], xs, model,
                                     "Predictions", axis)
    sk_res = add_plot_model_predictions(figure, grid[0, 2], xs, sk_model,
                                        "sklearn predictions", axis)

    # Smooth (probabilistic) output of the local model.
    probs = model.predict(xs, True)
    smooth = add_plot_data_2d(figure, grid[1, 1], xs, probs,
                              "Smooth predictions", axis)

    # For sklearn, collapse the two class probabilities into a single score.
    sk_probs = sk_model.predict_proba(xs)
    sk_scores = np.array([max(1 - p[0], p[1]) for p in sk_probs])
    sk_smooth = add_plot_data_2d(figure, grid[1, 2], xs, sk_scores,
                                 "sklearn smooth predictions", axis)

    error = add_plot_with_error_distrib(figure, grid[1, 0], model.history)

    plt.show()
Example #5
def test():
    """Compare the local RandomForestClassifier with sklearn's RFC.

    Fits both forests on 5-class 2-D blob data and plots the data
    distribution alongside each model's predictions on random query points.
    """
    data, labels = DataGenerator.blobs_2d(100, 5)

    figure = plt.figure(constrained_layout=True)
    grid = figure.add_gridspec(2, 2)

    n_queries = 1000
    axis = (0, 1, 0, 1)

    forest = RandomForestClassifier(10)
    forest.fit(data, labels)

    sk_forest = RFC(max_depth=10)
    sk_forest.fit(data, labels)

    data_distr = add_plot_data_2d(figure, grid[0, 0], data, labels,
                                  "Data distributions", axis)

    xs = np.random.rand(n_queries, 2)

    res = add_plot_model_predictions(figure, grid[0, 1], xs, forest,
                                     "Predictions", axis, True)
    sk_res = add_plot_model_predictions(figure, grid[1, 1], xs, sk_forest,
                                        "sklearn predictions", axis, True)

    plt.show()