Example #1
def update_svm_graph(
    kernel,
    degree,
    C_coef,
    C_power,
    gamma_coef,
    gamma_power,
    dataset,
    noise,
    shrinking,
    threshold,
    sample_size,
):
    t_start = time.time()
    h = 0.3  # step size in the mesh

    # Data Pre-processing
    X, y = generate_data(n_samples=sample_size, dataset=dataset, noise=noise)
    X = StandardScaler().fit_transform(X)
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.4,
                                                        random_state=42)

    # StandardScaler returns a NumPy array, so index positionally rather than with .loc
    x_min = X[:, 0].min() - 0.5
    x_max = X[:, 0].max() + 0.5
    y_min = X[:, 1].min() - 0.5
    y_max = X[:, 1].max() + 0.5

    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    C = C_coef * 10**C_power
    gamma = gamma_coef * 10**gamma_power

    # The shrinking control delivers the string "True"/"False", so compare explicitly
    flag = shrinking == "True"

    # Train SVM
    clf = SVC(C=C, kernel=kernel, degree=degree, gamma=gamma, shrinking=flag)
    clf.fit(X_train, y_train)

    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    if hasattr(clf, "decision_function"):
        Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
    else:
        Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]

    prediction_figure = figs.serve_prediction_plot(
        model=clf,
        X_train=X_train,
        X_test=X_test,
        y_train=y_train,
        y_test=y_test,
        Z=Z,
        xx=xx,
        yy=yy,
        mesh_step=h,
        threshold=threshold,
    )

    roc_figure = figs.serve_roc_curve(model=clf, X_test=X_test, y_test=y_test)

    confusion_figure = figs.serve_pie_confusion_matrix(model=clf,
                                                       X_test=X_test,
                                                       y_test=y_test,
                                                       Z=Z,
                                                       threshold=threshold)

    return [
        html.Div(
            id="svm-graph-container",
            children=dcc.Loading(
                className="graph-wrapper",
                children=dcc.Graph(id="graph-sklearn-svm",
                                   figure=prediction_figure),
                style={"display": "none"},
            ),
        ),
        html.Div(
            id="graphs-container",
            children=[
                dcc.Loading(
                    className="graph-wrapper",
                    children=dcc.Graph(id="graph-line-roc-curve",
                                       figure=roc_figure),
                ),
                dcc.Loading(
                    className="graph-wrapper",
                    children=dcc.Graph(id="graph-pie-confusion-matrix",
                                       figure=confusion_figure),
                ),
            ],
        ),
    ]
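Example #1 (like the variants that follow) builds its decision surface the same way: scale the two features, lay a regular mesh over the scaled plane, score every mesh point with decision_function (or the positive-class column of predict_proba), and hand the flattened scores to the plotting helpers. Below is a minimal, self-contained sketch of that recipe; make_moons stands in for the app's generate_data helper, which is not shown on this page.

import numpy as np
from sklearn.datasets import make_moons
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

# Toy stand-in for generate_data(...)
X, y = make_moons(n_samples=300, noise=0.3, random_state=0)
X = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)

h = 0.3  # step size in the mesh
x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

clf = SVC(C=1.0, kernel="rbf", gamma="scale").fit(X_train, y_train)

# One score per mesh point; reshape to xx.shape to draw the contour.
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
print(Z.shape, xx.shape)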
Example #2
def update_svm_graph(kernel, degree, C_coef, C_power, gamma_coef, gamma_power,
                     dataset, noise, shrinking, threshold, sample_size):
    t_start = time.time()
    h = .3  # step size in the mesh

    # Data Pre-processing
    X, y = generate_data(n_samples=sample_size, dataset=dataset, noise=noise)
    X = StandardScaler().fit_transform(X)
    X_train, X_test, y_train, y_test = \
        train_test_split(X, y, test_size=.4, random_state=42)

    x_min = X[:, 0].min() - .5
    x_max = X[:, 0].max() + .5
    y_min = X[:, 1].min() - .5
    y_max = X[:, 1].max() + .5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    C = C_coef * 10**C_power
    gamma = gamma_coef * 10**gamma_power

    # Train SVM
    clf = SVC(C=C,
              kernel=kernel,
              degree=degree,
              gamma=gamma,
              shrinking=shrinking)
    clf.fit(X_train, y_train)

    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    if hasattr(clf, "decision_function"):
        Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
    else:
        Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]

    prediction_figure = serve_prediction_plot(model=clf,
                                              X_train=X_train,
                                              X_test=X_test,
                                              y_train=y_train,
                                              y_test=y_test,
                                              Z=Z,
                                              xx=xx,
                                              yy=yy,
                                              mesh_step=h,
                                              threshold=threshold)

    roc_figure = serve_roc_curve(model=clf, X_test=X_test, y_test=y_test)

    confusion_figure = serve_pie_confusion_matrix(model=clf,
                                                  X_test=X_test,
                                                  y_test=y_test,
                                                  Z=Z,
                                                  threshold=threshold)

    print(f"Total Time Taken: {time.time() - t_start:.3f} sec")

    return [
        html.Div(
            className='three columns',
            style={
                'min-width': '24.5%',
                'height': 'calc(100vh - 90px)',
                'margin-top': '5px',

                # Remove possibility to select the text for better UX
                'user-select': 'none',
                '-moz-user-select': 'none',
                '-webkit-user-select': 'none',
                '-ms-user-select': 'none'
            },
            children=[
                dcc.Graph(id='graph-line-roc-curve',
                          style={'height': '40%'},
                          figure=roc_figure),
                dcc.Graph(id='graph-pie-confusion-matrix',
                          figure=confusion_figure,
                          style={'height': '60%'})
            ]),
        html.Div(className='six columns',
                 style={'margin-top': '5px'},
                 children=[
                     dcc.Graph(id='graph-sklearn-svm',
                               figure=prediction_figure,
                               style={'height': 'calc(100vh - 90px)'})
                 ])
    ]
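Example #2 passes the shrinking value straight into SVC, while Examples #1 and #3 first compare it against the string "True". If the control delivers its value as a string rather than a bool (which is what the comparisons in the other variants suggest), the explicit comparison is the safe choice, because any non-empty string is truthy:

# Why the explicit comparison matters when the control value is a string
# (an assumption about how the surrounding layout delivers the value).
print(bool("False"))       # True  -- a non-empty string is always truthy
print("False" == "True")   # False -- the comparison yields the intended flag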
Example #3
def update_svm_graph(
    model,
    kernel,
    degree,
    C_coef,
    C_power,
    gamma_coef,
    gamma_power,
    dataset,
    noise,
    shrinking,
    threshold,
    sample_size,
    logreg_reg_type,
    logreg_C_coef,
    logreg_C_power,
    logreg_l1_ratio,
    mlp_layers,
    mlp_activation,
    mlp_batch_size,
    mlp_l2_coef,
    mlp_l2_pow,
):
    h = 0.3  # step size in the mesh

    # Data Pre-processing
    X, y = generate_data(n_samples=sample_size, dataset=dataset, noise=noise)
    X = StandardScaler().fit_transform(X)
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.4,
                                                        random_state=42)

    x_min = X[:, 0].min() - 1.5
    x_max = X[:, 0].max() + 1.5
    y_min = X[:, 1].min() - 1.5
    y_max = X[:, 1].max() + 1.5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    if model == "SVM":
        C = C_coef * 10**C_power
        gamma = gamma_coef * 10**gamma_power

        # The shrinking control delivers the string "True"/"False", so compare explicitly
        flag = shrinking == "True"

        clf = SVC(
            C=C,
            kernel=kernel,
            degree=degree,
            gamma=gamma,
            shrinking=flag,
            probability=True,
        )

    elif model == "LogReg":
        C = logreg_C_coef * 10**logreg_C_power

        # liblinear does not support the "none" or "elasticnet" penalties, so use saga there
        if logreg_reg_type in ("none", "elasticnet"):
            solver = "saga"
        else:
            solver = "liblinear"

        clf = LogisticRegression(penalty=logreg_reg_type,
                                 C=C,
                                 l1_ratio=logreg_l1_ratio,
                                 solver=solver)

    elif model == "LDA":
        clf = LinearDiscriminantAnalysis()

    elif model == "QDA":
        clf = QuadraticDiscriminantAnalysis()

    elif model == "MLP":
        # e.g. "64, 32" -> (64, 32); strip whitespace so the separator spacing does not matter
        hidden_layers = tuple(int(n.strip()) for n in mlp_layers.split(","))
        l2_penalty = mlp_l2_coef * 10**mlp_l2_pow

        clf = MLPClassifier(
            hidden_layer_sizes=hidden_layers,
            activation=mlp_activation,
            batch_size=mlp_batch_size,
            alpha=l2_penalty,
        )

    else:
        raise ValueError(f"Unsupported model: {model}")
    clf.fit(X_train, y_train)

    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    # Every model above exposes predict_proba (the SVC because it was created with
    # probability=True), so the positive-class probability is used for the surface.
    Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]

    prediction_figure = figs.serve_prediction_plot(
        model=clf,
        X_train=X_train,
        X_test=X_test,
        y_train=y_train,
        y_test=y_test,
        Z=Z,
        xx=xx,
        yy=yy,
        mesh_step=h,
        threshold=threshold,
    )

    roc_figure = figs.serve_roc_curve(model=clf, X_test=X_test, y_test=y_test)

    confusion_figure = figs.serve_pie_confusion_matrix(model=clf,
                                                       X_test=X_test,
                                                       y_test=y_test,
                                                       Z=Z,
                                                       threshold=threshold)

    return [
        html.Div(
            id="svm-graph-container",
            children=dcc.Loading(
                className="graph-wrapper",
                children=dcc.Graph(id="graph-sklearn-svm",
                                   figure=prediction_figure),
                style={"display": "none"},
            ),
        ),
        html.Div(
            id="graphs-container",
            children=[
                dcc.Loading(
                    className="graph-wrapper",
                    children=dcc.Graph(id="graph-line-roc-curve",
                                       figure=roc_figure),
                ),
                dcc.Loading(
                    className="graph-wrapper",
                    children=dcc.Graph(id="graph-pie-confusion-matrix",
                                       figure=confusion_figure),
                ),
            ],
        ),
    ]
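Example #3 removes the decision_function branch and always reads predict_proba, which is why its SVC is constructed with probability=True: without that flag, calling predict_proba on an SVC raises (the exact exception type depends on the scikit-learn version). A small sketch of the difference:

from sklearn.datasets import make_classification
from sklearn.svm import SVC

X, y = make_classification(n_samples=200, n_features=2, n_informative=2,
                           n_redundant=0, random_state=0)

clf_plain = SVC().fit(X, y)                    # no probability estimates
clf_proba = SVC(probability=True).fit(X, y)    # enables Platt-scaled probabilities

try:
    clf_plain.predict_proba(X[:3])
except Exception as exc:
    print("plain SVC:", type(exc).__name__)

print("probability=True:", clf_proba.predict_proba(X[:3])[:, 1])

Enabling probability=True adds an internal cross-validation pass during fit, so it costs extra training time; that is the usual trade-off for getting probability estimates out of an SVM.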
Example #4
def update_svm_graph(col1, col2, threshold):
    t_start = time.time()
    h = 0.3  # step size in the mesh

    # Data Pre-processing: pick the two requested survey columns and split them
    X = app.encuestas["X"][[col1, col2]].to_numpy()
    y = app.encuestas["y"].to_numpy()

    X_train, X_test, y_train, y_test = app.split_data(X, y)

    print(X.shape, y.shape)
    print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)

    np.savetxt("test.csv", X_train, delimiter=",")  # debug dump of the training split

    x_min = X[:, 0].min() - 0.5
    x_max = X[:, 0].max() + 0.5
    y_min = X[:, 1].min() - 0.5
    y_max = X[:, 1].max() + 0.5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    # xx = app.encuestas["XX"]
    # yy = app.encuestas["yy"]

    # Train SVM: the app-level helper is expected to return an already-fitted classifier
    clf = app.get_svc_classifier(col1, col2, X_train, y_train)

    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    if hasattr(clf, "decision_function"):
        Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
    else:
        Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]

    prediction_figure = figs.serve_prediction_plot(
        model=clf,
        X_train=X_train,
        X_test=X_test,
        y_train=y_train,
        y_test=y_test,
        Z=Z,
        xx=xx,
        yy=yy,
        mesh_step=h,
        threshold=threshold,
    )

    # The ROC curve and confusion-matrix figures are disabled in this variant.

    return [
        html.Div(
            id="svm-graph-container",
            children=dcc.Loading(
                className="graph-wrapper",
                children=dcc.Graph(id="graph-sklearn-svm",
                                   figure=prediction_figure),
                style={"display": "none"},
            ),
        ),
    ]
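Example #4 relies on app-level helpers (app.encuestas, app.split_data, app.get_svc_classifier) whose definitions are not included on this page. For orientation only, here is a hypothetical sketch of what get_svc_classifier could look like; the name and call signature come from the example above, everything else is an assumption.

from sklearn.svm import SVC

def get_svc_classifier(col1, col2, X_train, y_train):
    # Hypothetical helper body: fit an SVC on the two selected columns.
    # col1/col2 could be used to look up per-column-pair hyperparameters;
    # here a plain RBF SVC is assumed.
    clf = SVC(kernel="rbf", C=1.0, gamma="scale")
    clf.fit(X_train, y_train)
    return clf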