# NOTE(review): fragment of a TF1-style training loop, reconstructed from a
# whitespace-mangled source. The `if` matching the `else:` below lies outside
# this view, and the final `if msave.iter < niter:` body is cut off here —
# nesting levels are best-effort; confirm against the original file.

# Log current learning rate and noise std at this iteration
# (presumably only on some display interval decided by the missing `if`).
ut.vprint(niter, ['1lr'], [LR])
ut.vprint(niter, ['1nstd'], [NSTD])
else:
    # Otherwise, log only the first tracked training value.
    ut.vprint(niter,[tnames[0]],[outs[0][0]])
niter=niter+1

## Save model weights if needed
if SAVEFREQ > 0 and niter % SAVEFREQ == 0:
    # Checkpoint file names encode the iteration count.
    mfn = wts+"/iter_%06d.model.npz" % niter
    sfn = wts+"/iter_%06d.state.npz" % niter
    ut.mprint("Saving model to " + mfn )
    ut.saveNet(mfn,model,sess)
    ut.mprint("Saving state to " + sfn )
    # Persist the Adam optimizer state alongside the weights so training
    # can resume exactly.
    ut.saveAdam(sfn,opt,model.weights,sess)
    ut.mprint("Done!")
    # Prune old checkpoints on disk: keep the periodic ones plus the latest.
    msave.clean(every=SAVEFREQ,last=1)
    ssave.clean(every=SAVEFREQ,last=1)

## Learning rate drop
# Step-decay schedule: LR/sqrt(10) at 400k iterations, LR/10 at 500k.
# NOTE(review): `niter == 4e5` compares an int to a float literal — works,
# but the drop fires only when niter hits the value exactly.
if niter == 4e5:
    sess.run(tf.assign(lr, LR/np.sqrt(10.0)))
elif niter == 5e5:
    sess.run(tf.assign(lr, LR/10.0))

# Save last
# If the most recent checkpoint is older than the current iteration,
# save a final one. NOTE(review): the actual save calls are cut off past
# this view — only the filename construction is visible here.
if msave.iter < niter:
    mfn = wts+"/iter_%06d.model.npz" % niter
    sfn = wts+"/iter_%06d.state.npz" % niter
# NOTE(review): fragment of a second training loop, reconstructed from a
# whitespace-mangled source. The leading `break` belongs to a condition
# outside this view, and the two trailing save blocks presumably sit AFTER
# the loop — nesting is best-effort; confirm against the original file.
break

# Reshuffle the training list at the start of each epoch
# (every ESIZE iterations), using the seeded RandomState `rs`.
if niter % ESIZE == 0:
    idx = rs.permutation(len(tlist))

# Assemble the next minibatch of BSZ examples from the shuffled index
# and run one optimization step.
blst = [tlist[idx[(niter%ESIZE)*BSZ+b]] for b in range(BSZ)]
outs,_ = sess.run([vals,tstep],feed_dict=d.fdict(blst))
niter = niter+1
# Accumulate per-step outputs; averaged and reset at display time below.
touts = touts+np.float32(outs)

# Periodic checkpoint; prune old files, keeping every SAVEITER-th
# plus the most recent.
if niter % SAVEITER == 0:
    ut.saveNet('wts/model_%d.npz'%niter,net,sess)
    saver.clean(every=SAVEITER,last=1)
    ut.mprint('Saved Model')

# Periodic display: average the accumulated outputs, log them with the
# learning rate, then reset the accumulator.
if niter % DISPITER == 0:
    touts = touts/np.float32(DISPITER)
    ut.vprint(niter,['lr']+tnms,[LR]+list(touts))
    touts = 0.
    # Honor an external stop request (presumably set by a signal handler
    # in ut — TODO confirm) so training exits cleanly at a display point.
    if ut.stop:
        break

# After the loop: save a final model if training advanced past the last
# checkpoint on disk.
if niter > saver.iter:
    ut.saveNet('wts/model_%d.npz'%niter,net,sess)
    saver.clean(every=SAVEITER,last=1)
    ut.mprint('Saved Model')

# Save the Adam optimizer state only if any new iterations actually ran
# since the run started (origiter = iteration count at startup).
if niter > origiter:
    ut.saveAdam('wts/opt.npz',opt,net.weights,sess)
    ut.mprint("Saved Optimizer.")