Example #1
# Imports assumed by this snippet (pre-0.18 scikit-learn API, matching the
# cross_validation module used below); impute_nan and normalize_features are
# project helpers defined elsewhere.
import numpy as np
from sklearn import cross_validation
from sklearn.metrics import confusion_matrix


def train_model(X, y, clf):

    #split the dataset
    crossvalidation = cross_validation.StratifiedKFold(y, n_folds=5)

    #fit the model
    cms = []
    train_scores = []
    test_scores = []


    for train, test in crossvalidation:
        X_train, y_train = X[train], y[train]
        X_test, y_test = X[test], y[test]

        X_train, X_test = impute_nan(X_train, X_test)
        X_train, X_test = normalize_features(X_train, X_test)
        #print(X_train[0])

        clf.fit(X_train, y_train)

        #evaluate the model
        train_score = clf.score(X_train, y_train)
        train_scores.append(train_score)
        test_score = clf.score(X_test, y_test)
        test_scores.append(test_score)

        y_predict = clf.predict(X_test)
        cm = confusion_matrix(y_test, y_predict)
        cms.append(cm)

    return np.mean(test_scores), np.mean(train_scores), np.asarray(cms)
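
Below is a minimal usage sketch for the function above, not part of the original example: the random data and the RandomForestClassifier are illustrative placeholders, and it assumes the project's impute_nan and normalize_features helpers are importable.

# Hypothetical call site: any sklearn-style estimator with fit/score/predict works.
import numpy as np
from sklearn.ensemble import RandomForestClassifier

X = np.random.rand(100, 23)           # placeholder feature matrix
y = np.random.randint(0, 2, 100)      # placeholder binary labels

test_acc, train_acc, cms = train_model(X, y, RandomForestClassifier(n_estimators=100))
print(test_acc, train_acc)
print(cms.sum(axis=0))                # aggregate confusion matrix over the 5 folds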
Example #2
# Imports assumed by this snippet (sknn = scikit-neuralnetwork, pre-0.18
# scikit-learn); impute_nan and normalize_features are project helpers
# defined elsewhere.
from sklearn.cross_validation import train_test_split
from sknn.mlp import Classifier, Layer


def train_sknn(X, y):
    '''
        NeuralNet with sknn
    '''
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.25,
                                                        random_state=5)
    X_train, X_test = impute_nan(X_train, X_test)
    X_train, X_test = normalize_features(X_train, X_test)
    nn = Classifier(layers=[Layer("Tanh", units=12),
                            Layer("Softmax")],
                    learning_rate=0.005,
                    n_iter=25)

    # gs = GridSearchCV(nn, param_grid={
    #     'learning_rate': [0.05, 0.01, 0.005, 0.001],
    #     'hidden0__units': [4, 8, 12,100],
    #     'hidden0__type': ["Rectifier", "Sigmoid", "Tanh"]})
    # gs.fit(X_train, y_train)
    # print(gs.best_estimator_)
    nn.fit(X_train, y_train)
    # sknn returns predictions as an (n_samples, 1) column; flatten to 1-D
    predicted = nn.predict(X_test).flatten()
    labels = y_test
    return predicted, labels
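
A short evaluation sketch for the (predicted, labels) pair returned above; the metric calls are standard sklearn, the random data is a placeholder, and the project's impute_nan and normalize_features helpers are assumed to be importable.

# Hypothetical scoring of the predictions returned by train_sknn.
import numpy as np
from sklearn.metrics import accuracy_score, confusion_matrix

X = np.random.rand(200, 23)           # placeholder features
y = np.random.randint(0, 2, 200)      # placeholder labels

predicted, labels = train_sknn(X, y)
print("accuracy:", accuracy_score(labels, predicted))
print(confusion_matrix(labels, predicted))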
Example #3
# Imports assumed by this snippet (nolearn/lasagne stack, pre-0.18
# scikit-learn); impute_nan and normalize_features are project helpers
# defined elsewhere.
import numpy as np
import lasagne
from lasagne import layers
from lasagne.updates import nesterov_momentum
from nolearn.lasagne import NeuralNet
from sklearn.cross_validation import train_test_split


def train_nolearn_model(X, y):
    '''
        NeuralNet with nolearn
    '''
    # Theano/lasagne expect float32 features and int32 class labels
    X = X.astype(np.float32)
    y = y.astype(np.int32)

    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.2,
                                                        random_state=5)
    X_train, X_test = impute_nan(X_train, X_test)
    X_train, X_test = normalize_features(X_train, X_test)

    lays = [
        ('input', layers.InputLayer),
        ('hidden', layers.DenseLayer),
        ('output', layers.DenseLayer),
    ]

    net = NeuralNet(
        layers=lays,
        input_shape=(None, 23),   # must match the number of feature columns
        hidden_num_units=10,
        objective_loss_function=lasagne.objectives.categorical_crossentropy,
        output_nonlinearity=lasagne.nonlinearities.sigmoid,
        output_num_units=10,      # one unit per target class
        update=nesterov_momentum,
        update_learning_rate=0.001,
        update_momentum=0.9,
        max_epochs=10,
        verbose=1,
    )
    # Fit before scoring; predict() takes only X, so use score() for accuracy
    net.fit(X_train, y_train)
    test_score = net.score(X_test, y_test)
    train_score = net.score(X_train, y_train)
    return train_score, test_score
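
A call-site sketch, assuming the feature matrix has 23 columns (to match input_shape) and 10 target classes (to match output_num_units); the random data is a placeholder and the project's impute_nan and normalize_features helpers are assumed to be importable.

# Hypothetical call: feature count and class count must match the net's config.
import numpy as np

X = np.random.rand(500, 23)
y = np.random.randint(0, 10, 500)     # 10 classes, one per output unit

train_acc, test_acc = train_nolearn_model(X, y)
print("train accuracy:", train_acc, "test accuracy:", test_acc)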