# Report training metrics for the current global step.
currentStep = tf.train.global_step(sess, globalStep)
print("train: step: {}, loss: {}, acc: {}, recall: {}, precision: {}, f_beta: {}".format(
    currentStep, loss, acc, recall, prec, f_beta))

# Every evaluateEvery_BiLSTM steps, run a full pass over the validation set.
if currentStep % config.training.evaluateEvery_BiLSTM == 0:
    print("\nEvaluation:")

    losses = []
    accs = []
    f_betas = []
    precs = []
    recalls = []

    for batchEval in nextBatch(evalContents, evalLabels, config.batchSize):
        loss, acc, precision, recall, f_beta = devStep(batchEval[0], batchEval[1])
        losses.append(loss)
        accs.append(acc)
        precs.append(precision)
        recalls.append(recall)
        f_betas.append(f_beta)

    # Average the per-batch metrics and log them with a timestamp.
    time_str = datetime.datetime.now().isoformat()
    print("{}, step: {}, loss: {}, acc: {}, precision: {}, recall: {}, f_beta: {}".format(
        time_str, currentStep, mean(losses), mean(accs), mean(precs),
        mean(recalls), mean(f_betas)))

    # Checkpoint the model after each evaluation.
    saver.save(sess, savedModelPath)
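# Note: this fragment runs inside the training loop of the full script, so
# `sess`, `saver`, `config`, `devStep`, and the batch data come from the
# surrounding code. It also assumes a `nextBatch` generator and a `mean`
# helper defined elsewhere; a minimal sketch of what those might look like
# (hypothetical, the original definitions are not shown in this excerpt):

def nextBatch(contents, labels, batchSize):
    # Yield successive (contents, labels) mini-batches of size batchSize.
    for start in range(0, len(contents), batchSize):
        yield contents[start:start + batchSize], labels[start:start + batchSize]

def mean(values):
    # Arithmetic mean of the per-batch metric values (numpy.mean would also work).
    return sum(values) / len(values)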