print('pred:', data.printtag(tf.argmax(pred[0]).eval())) step += 1 else: loss_total += loss acc_total += acc step += 1 #帮global_step(用来调节学习率指数下降的)加一 model.global_step += 1 #print(model.global_step.eval()) #输出 if step % display_step == 0: writer.add_summary(summary, step) #print('free memory= '+str(int(getMem()/1000000))+"GB, Iter= " + str(step+1) + ", Average Loss= " + \ print( "Iter= " + str(step+1) + ", Average Loss= " + \ "{:.6f}".format(loss_total/display_step) + ", Average Accuracy= " + \ "{:.2f}%".format(100*acc_total/display_step)," Elapsed time: ", elapsed(time.time() - start_time)) if False: # testflag==False and acc_total>max_acc_total: max_acc_total = acc_total print( 'saved to: ', saver.save(session, saving_path3, global_step=step)) # start_time=time.time() acc_total = 0 loss_total = 0 #保存 if step % saving_step == 0: print( 'saved to: ',
"""Test w/o decorator syntax.

Demonstrates applying the `elapsed` timing decorator manually
(`test = elapsed(test)`) instead of with `@elapsed`.
"""
import datetime  # NOTE(review): unused here; kept to preserve the file's imports
import time
import os

from elapsed import elapsed


def test(a, b="unknown"):
    """Print both arguments, then sleep so `elapsed` has something to time.

    :param a: first value to print.
    :param b: second value to print; defaults to "unknown"
        (fixes the original "unkwnown" typo).
    """
    # Python 3 print function (original used the Python 2 print statement,
    # which is a SyntaxError under Python 3).
    print(a, b)
    time.sleep(5)


# Manual decoration — equivalent to writing @elapsed above the def.
test = elapsed(test)

test("hello", os.getenv("USER"))