Example #1
        for j, lmbd in enumerate(lmbd_vals):
            for k, act_h in enumerate(acts_hidden):
                for l, hn in enumerate(hidden_neurons):
                    dnn = NeuralNetwork(XTrain, Y_train_onehot.toarray(),
                                        XTest, Y_test_onehot.toarray(),
                                        cost=cost, batch_size=batch_size,
                                        eta=eta, lmbd=lmbd,
                                        n_hidden_neurons=hn, act_h=act_h,
                                        epochs=epochs, tol=tol)
                    costs, scores = dnn.train()
                    accuracy = scores[-1,1,0]  # accuracy at the final epoch
                    rocauc = scores[-1,1,1]    # ROC AUC at the final epoch
                    #dnn.plot_costs(color_iter)
                    dnn.plot_scores(color_iter)
                    color_iter += 1


                    # predictions and classification report on the test set
                    yTrue, yPred = yTest, dnn.predict(XTest)
                    logreg.own_classification_report(yTrue, yPred)
                    if metric=="accuracy":
                        if accuracy > best_accuracy:
                            best_accuracy = accuracy
                            best_eta = eta
                            best_lmbd = lmbd
                            best_hn = hn
                            best_act_h = act_h
                            best_rocauc = rocauc
                    elif metric=="roc_auc":
                        if rocauc > best_rocauc:
                            best_rocauc = rocauc
                            best_accuracy = accuracy
                            best_eta = eta
                            best_lmbd = lmbd
                            best_hn = hn
                            best_act_h = act_h
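
A minimal sketch of the outer scaffolding this grid search presumably sits in. The hyperparameter grids, the best_* initial values and the color_iter start are assumptions introduced here for illustration; only the loop variables and tracker names mirror Example #1.

import numpy as np

# Hypothetical grids and trackers (assumed values, not the project's actual settings)
eta_vals = np.logspace(-5, -1, 5)
lmbd_vals = np.logspace(-5, -1, 5)
acts_hidden = ["sigmoid", "relu", "tanh"]
hidden_neurons = [10, 50, 100]

best_accuracy = best_rocauc = 0.0
best_eta = best_lmbd = best_hn = best_act_h = None
color_iter = 0

for i, eta in enumerate(eta_vals):
    for j, lmbd in enumerate(lmbd_vals):
        for k, act_h in enumerate(acts_hidden):
            for l, hn in enumerate(hidden_neurons):
                # ... construct the NeuralNetwork, train it, read off accuracy
                # and ROC AUC, and update the best_* trackers as in Example #1 ...
                pass
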
Example #2
         batch_size=100,
         n_epoch=25,
         verbosity=1,
         n_iter=10,
         new_per_iter=False
     )  # Fit using SGD. This can be looped over to find the best learning rate 'lr'.
 else:
     beta, costs = logreg.GD(
         XTrain, yTrain.ravel(), lr=lrs[j], rnd_seed=True, tol=1e-2
     )  # Fit using GD. This can be looped over to find the best learning rate 'lr'.
     betas = beta.copy()
 if plt_cost:
     plt.plot(costs, label='%5.3f' % lrs[j])  # cost curve for this learning rate
 yPred = logreg.predict(XTest)  # predictions on the test set
 f1, ac1 = logreg.own_classification_report(yTest,
                                            yPred,
                                            return_f1=True,
                                            return_ac=True)
 yPred = logreg.predict(XTrain)  # predictions on the training set
 f2, ac2 = logreg.own_classification_report(yTrain,
                                            yPred,
                                            return_f1=True,
                                            return_ac=True)
 f3 = (f1 + f2) / 2.0  # F1 score averaged over test and train
 ac = (ac1 + ac2) / 2.0  # accuracy averaged over test and train
 f1_log.append(f1)
 f3_log.append(f3)
 ac_log.append(ac)
 ac1_log.append(ac1)
 if return_ar:
     ar = logreg.plot_cumulative(XTest, yTest, return_ar=return_ar)
     ar_log.append(ar)
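
A minimal sketch of the loop the SGD/GD branch and the score logging above could sit in. The learning-rate grid lrs, the use_sgd switch and the empty log lists are assumptions introduced for illustration; the fitting and scoring calls themselves are the ones shown in Example #2.

import numpy as np

# Hypothetical learning-rate grid, mode switch and log containers (assumed, not the project's values)
lrs = np.logspace(-4, 0, 5)
use_sgd = True
plt_cost, return_ar = True, False
f1_log, f3_log, ac_log, ac1_log, ar_log = [], [], [], [], []

for j, lr in enumerate(lrs):
    # ... fit with logreg.SGD(...) when use_sgd is True, otherwise logreg.GD(...),
    # plot the cost curve, score on test and train, and append the F1 and
    # accuracy values to the log lists exactly as in Example #2 ...
    pass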