# One training epoch: run a fixed number of update steps while accumulating
# the loss and confusion-matrix counts, then report train/test metrics,
# optionally checkpoint the weights, and shut the classifier down.
running_loss = 0
train_counts = [0, 0, 0, 0]  # accumulated (tp, tn, fp, fn) over the epoch
for _ in range(args.updates_per_epoch):
    x, label = next(training_batch)
    running_loss += classifier.update(x, label)
    # evaluate() yields per-batch (tp, tn, fp, fn); fold into the running totals
    batch_counts = classifier.evaluate(x, label)
    train_counts = [total + cnt for total, cnt in zip(train_counts, batch_counts)]

train_acc, train_prec, _, _, train_for = metric.metric(*train_counts)

# Held-out evaluation on a single test batch.
x_test, label_test = ds.batchTest()
test_counts = classifier.evaluate(x_test, label_test)
test_acc, test_prec, _, _, test_for = metric.metric(*test_counts)

mean_loss = running_loss / args.updates_per_epoch
print(epoch, "Loss: {: .4f}".format(mean_loss))
print(
    "Training Accuracy = {:.4f}, Precision = {:.4f}, FOR = {:.4f}".format(
        train_acc, train_prec, train_for))
print("Testing Accuracy = {:.4f}, Precision = {:.4f}, FOR = {:.4f}".format(
    test_acc, test_prec, test_for))

if args.save_weights:
    classifier.save_weights(args.save_weights)
classifier.done()