Example #1
0
# Adam optimizer with an explicit learning rate. FIX: Keras renamed the `lr`
# kwarg to `learning_rate`; the old spelling is deprecated and removed in
# recent TensorFlow/Keras releases.
opt = keras.optimizers.Adam(learning_rate=0.001)

if TRAIN:

    # R^2 ("coefficient of determination") is tracked as a custom metric
    # supplied by the project's `auxilary` module.
    model.compile(loss='mse',
                  optimizer=opt,
                  metrics=[auxilary.coefficientofdetermination])
    # es/mc look like early-stopping and model-checkpoint callbacks
    # (a 'best_model.h5' is reloaded below) — confirm against their definitions.
    model.fit(X_train,
              y_train,
              validation_data=(X_val, y_val),
              epochs=2000,
              batch_size=32,
              callbacks=[es, mc])

    # Reload the checkpointed best model. The custom metric must be passed in
    # `custom_objects` so deserialization can resolve it by name.
    bestModel = load_model('best_model.h5',
                           custom_objects={
                               'coefficientofdetermination':
                               auxilary.coefficientofdetermination
                           })

    # Drop the identifier column before preprocessing the test features.
    del X_test['id']
    # FIX: apply the scaling learned on the training data instead of re-fitting
    # on the test set. `fit_transform` here leaked test statistics into the
    # scaler and made train/test features inconsistent; the other pipeline in
    # this file (Example #2) correctly uses `transform` only.
    X_test = scaler.transform(X_test)
    X_test = np.nan_to_num(X_test, nan=0)
    X_test = featureSelection.transform(X_test)

    y_predictions = bestModel.predict(X_test)
    # Flatten the (n, 1) prediction column into a 1-D vector of length n.
    y_predictions = np.reshape(y_predictions, y_predictions.shape[0])

    auxilary.createSubmissionFiles(y_predictions)

# NOTE(review): this result is discarded, and when TRAIN is False, X_test has
# not been scaled/selected here — verify this trailing call is intentional.
model.predict(X_test)
Example #2
0
# XGBoost hyper-parameter grid for GridSearchCV. Every entry is a
# single-element list, so the grid contains exactly one configuration.
# NOTE(review): 'binary:logistic' together with num_class=[4] looks
# inconsistent — a 4-class problem usually wants 'multi:softmax' or
# 'multi:softprob'; confirm before relying on these results.
parameters = dict(
    objective=['binary:logistic'],
    max_depth=[10],
    min_child_weight=[11],
    n_estimators=[400],
    seed=[1111],
    learning_rate=[0.05],
    max_delta_step=[3],
    num_class=[4],
)

# Exhaustive search over `parameters` with 10-fold cross-validation, scored by
# the project-defined scoreFunction and parallelized over 5 worker processes.
clf = GridSearchCV(estimator=xgb_model,
                   param_grid=parameters,
                   n_jobs=5,
                   cv=10,
                   scoring=scoreFunction,
                   verbose=2)
clf.fit(X_train, y_train)

# best_score_ is the best mean cross-validation score, not a held-out
# validation score as the message suggests. 0.6670 was a past run's value.
print("Best score of best on validation set: ", clf.best_score_)  # ~0.6670 in a past run
# NOTE(review): the previous "#rbf, 10" comment looked copy-pasted from an
# SVM script; this grid tunes XGBoost parameters, not an RBF kernel.
print("Best Parameters: ", clf.best_params_)

# Reuse the scaler fitted on the training data (transform only — no re-fit).
X_test = scaler.transform(X_test)
if FEATURE_SELECTION:
    X_test = featureSelection.transform(X_test)
# Predict with the refit best estimator found by the grid search.
y_pred_test = clf.predict(X_test)
# Quick sanity check: how often class 3 is predicted.
print('Number of 3:', np.count_nonzero(y_pred_test == 3))

auxilary.createSubmissionFiles(y_pred_test)