Example #1
validSummary = tf.summary.scalar("test error", validErrorPH)
tfSummaryWriter = tf.summary.FileWriter("summary_{1}/{0}".format(
    saveFname, suffix))
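# The event files written by this FileWriter can be inspected with TensorBoard
# while training runs, e.g. something like: tensorboard --logdir summary_<suffix>
# (the exact directory name depends on the suffix variable defined earlier).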
resumeIterN = 0  # set to a saved checkpoint iteration to resume training instead of starting fresh
maxIterN = 30000  # total number of training iterations
with tf.Session(config=tfConfig) as sess:
    if resumeIterN == 0:
        sess.run(tf.global_variables_initializer())
    else:
        tfSaver.restore(
            sess,
            "models_{2}/{0}_it{1}k.ckpt".format(saveFname, resumeIterN // 1000,
                                                suffix))
        print("resuming from iteration {0}...".format(resumeIterN))
    tfSummaryWriter.add_graph(sess.graph)
    params.baseLRST = 0.0001
    # training loop
    for i in range(resumeIterN, maxIterN):
        currLearningRate = (params.baseLRST, params.baseLR)  # constant learning rates; can be replaced with a schedule (see sketch below)
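        # A minimal sketch of a stepwise decay schedule, assuming hypothetical
        # params.lrDecay and params.lrStep attributes (not defined in the original code):
        # decay = params.lrDecay ** (i // params.lrStep)
        # currLearningRate = (params.baseLRST * decay, params.baseLR * decay)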
        randIdx = np.random.randint(trainN, size=[params.batchSize])  # sample a random minibatch of training indices
        trainBatch = {
            imageRawBatch: trainData["image"][randIdx],
            labelBatch: trainData["label"][randIdx],
            learningRate: currLearningRate
        }
        # run one training step and fetch the batch loss and its summary
        _, trainBatchLoss, summary = sess.run([trainStep, loss, lossSummary],
                                              feed_dict=trainBatch)
        if (i + 1) % 10 == 0:  # log the training loss summary every 10 iterations
            tfSummaryWriter.add_summary(summary, i + 1)
        if (i + 1) % 100 == 0: