lr=0.001)

            # Train the model
            model.fit(x=train_data,
                      y=train_labels,
                      epochs=__N_EPOCHS,
                      shuffle=True,
                      batch_size=2056,
                      verbose=False)

            # Predict on the test set
            test_preds = model.predict(x=test_data)

            # Get ROC AUC (threshold-independent) and metrics at the default
            # 0.50 decision threshold
            auc = roc_auc_score(y_true=test_labels, y_score=test_preds)
            acc = get_acc(preds=test_preds[:, 1], labels=test_labels[:, 1])
            sens = get_sens(preds=test_preds[:, 1], labels=test_labels[:, 1])
            spec = get_spec(preds=test_preds[:, 1], labels=test_labels[:, 1])

            # Get metrics at optimal decision threshold
            opt_thresh = get_opt_thresh(preds=test_preds[:, 1],
                                        labels=test_labels[:, 1],
                                        n_thresholds=10000)
            test_acc_opt = get_acc(preds=test_preds[:, 1],
                                   labels=test_labels[:, 1],
                                   threshold=opt_thresh)
            test_sens_opt = get_sens(preds=test_preds[:, 1],
                                     labels=test_labels[:, 1],
                                     threshold=opt_thresh)
            test_spec_opt = get_spec(preds=test_preds[:, 1],
                                     labels=test_labels[:, 1],
                                     threshold=opt_thresh)

        # Calculate the predictions
        g1_preds = dnn.predict(x=g1_d)

        # Get and store ROC AUC
        g1_auc = 100 * roc_auc_score(y_true=g1_labels, y_score=g1_preds)
        auc_scores[run_idx] = g1_auc

        # Get optimal decision threshold
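        # (get_opt_thresh's n_thresholds argument is left at its default
        #  here; the earlier call passes n_thresholds=10000 explicitly)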
        opt_thresh = get_opt_thresh(preds=g1_preds[:, 1],
                                    labels=g1_labels[:, 1])

        # Store performance metrics (as percentages) at the optimal threshold
        accs[run_idx] = 100 * get_acc(
            preds=g1_preds[:, 1], labels=g1_labels[:, 1], threshold=opt_thresh)
        sens[run_idx] = 100 * get_sens(
            preds=g1_preds[:, 1], labels=g1_labels[:, 1], threshold=opt_thresh)
        spec[run_idx] = 100 * get_spec(
            preds=g1_preds[:, 1], labels=g1_labels[:, 1], threshold=opt_thresh)

        # Plot ROC curve
        plt_roc_curve(preds=g1_preds[:, 1],
                      labels=g1_labels[:, 1],
                      save_str='dnn_run_%d_roc' % run_idx,
                      save=True)

        # Report AUC at this run
        logger.info('\t\tAUC:\t%.2f' % g1_auc)

        # Get the hard class predictions by thresholding the positive-class
        # scores at the optimal decision threshold
        class_preds = (g1_preds[:, 1] >= opt_thresh).astype(int)