Code example #1
import matplotlib.pyplot as plt
import pandas as pd


def train_plot(training_data):
    # Plot the training and validation loss per epoch
    plt.plot(training_data['loss'], linewidth=2, label='Train')
    plt.plot(training_data['val_loss'], linewidth=2, label='Valid')
    plt.legend(loc='upper right')
    plt.title('Model Loss')
    plt.ylabel('Loss')
    plt.xlabel('Epoch')
    plt.show()


if __name__ == '__main__':

    # Import the data (preprocessing() and the DNN class are assumed to be
    # defined elsewhere in this script)
    df = pd.read_csv('Book1.csv', index_col=0)
    x_train, x_valid, y_train, y_valid, X, y = preprocessing(df)
    input_dim = x_train.shape[1]

    # Create an instance of the model class
    model = DNN(input_dim)
    model.summary()

    # Set the save path and train
    model.set_save_path('dnn_v1.h5')
    output = model.fit(x_train, y_train, x_valid, y_valid)
    train_plot(output)

    # Load the saved weights and predict on the full data set
    model.load_model('dnn_v1.h5')
    result = model.predict(X)
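
As a side note, train_plot only needs a mapping that exposes 'loss' and 'val_loss' per epoch. If the custom DNN.fit wrapper returns a standard Keras History object rather than its .history dict, the call would look like the minimal sketch below (the plain Sequential model, layer sizes, and dummy data are illustrative assumptions, not part of the original code):

import numpy as np
import tensorflow as tf

# Dummy regression data purely for illustration
x_train = np.random.rand(200, 10).astype('float32')
y_train = np.random.rand(200, 1).astype('float32')
x_valid = np.random.rand(50, 10).astype('float32')
y_valid = np.random.rand(50, 1).astype('float32')

# Hypothetical stand-in for the custom DNN wrapper: a plain Keras model
model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation='relu', input_shape=(10,)),
    tf.keras.layers.Dense(1),
])
model.compile(optimizer='adam', loss='mse')

# Keras fit() returns a History object; its .history attribute is the
# dict of per-epoch metrics ('loss', 'val_loss') that train_plot expects
history = model.fit(x_train, y_train, validation_data=(x_valid, y_valid),
                    epochs=5, verbose=0)
train_plot(history.history)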
Code example #2
            else:
                # Non-zero sample: count it and give it label 1
                nozerotrain = nozerotrain + 1
                train_label_new.append(1)

        # Concatenate the two feature blocks into the final train/test matrices
        prefilter_train = np.concatenate((train1, train2), axis=1)
        prefilter_test = np.concatenate((test1, test2), axis=1)

        # One-hot encode the labels: 1 -> [0, 1], 0 -> [1, 0]
        train_label_new_forDNN = np.array([[0, 1] if i == 1 else [1, 0] for i in train_label_new])
        test_label_new_forDNN = np.array([[0, 1] if i == 1 else [1, 0] for i in real_labels])

        # Build, train, and save the DNN (the DNN() builder is defined elsewhere in this script)
        model_DNN = DNN()
        model_DNN.fit(prefilter_train, train_label_new_forDNN, epochs=30, batch_size=200)
        model_DNN.save("my_model")
        # model_DNN = models.load_model("my_model")
        model_DNN.summary()  # summary() already prints and returns None, so wrapping it in print() is unnecessary

        # Predicted classes and class probabilities for the test set
        # (predict_classes/predict_proba were removed from Keras Sequential models
        # in newer TensorFlow releases; see the sketch after this example)
        proba = model_DNN.predict_classes(prefilter_test, batch_size=200, verbose=True)
        ae_y_pred_prob = model_DNN.predict_proba(prefilter_test, batch_size=200, verbose=True)
        # Accuracy, precision, sensitivity, specificity and MCC
        # (calculate_performace is defined elsewhere in this script)
        acc, precision, sensitivity, specificity, MCC = calculate_performace(len(real_labels), proba, real_labels)

        # ROC/AUC and precision-recall/AUPR; roc_curve, auc and
        # precision_recall_curve come from sklearn.metrics
        fpr, tpr, auc_thresholds = roc_curve(real_labels, ae_y_pred_prob[:, 1])
        auc_score = auc(fpr, tpr)

        precision1, recall, pr_threshods = precision_recall_curve(real_labels, ae_y_pred_prob[:, 1])
        aupr_score = auc(recall, precision1)
        # f = f1_score(real_labels, transfer_label_from_prob(ae_y_pred_prob[:,1]))
        all_F_measure = np.zeros(len(pr_threshods))

        print("AUC:", auc_score)
        print("AUPR:", aupr_score)
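
On recent TensorFlow/Keras releases, predict_classes and predict_proba no longer exist on Sequential models, and predict alone provides both results. Below is a minimal sketch of that replacement, assuming the two-unit softmax output implied by the one-hot labels above (variable names mirror the fragment):

import numpy as np

# predict() returns the raw output of the final layer; with a 2-unit softmax
# this is an (n_samples, 2) array of class probabilities, i.e. what
# predict_proba used to return.
ae_y_pred_prob = model_DNN.predict(prefilter_test, batch_size=200, verbose=1)

# The predicted class is the index of the largest probability in each row,
# i.e. what predict_classes used to return.
proba = np.argmax(ae_y_pred_prob, axis=1)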