def test(self):
    self.sess.run(self.test_iterator.initializer)
    losses = []
    accs = []
    while True:
        try:
            x_batch, y_batch = self.sess.run(self.test_next)
        except tf.errors.OutOfRangeError:
            break
        loss, acc = self.sess.run(
            [self.model.loss, self.model.acc],
            feed_dict={
                self.model.x: x_batch,
                self.model.y: y_batch,
                self.model.training: False
            })
        losses.append(loss)
        accs.append(acc)
    loss = np.mean(losses)
    acc = np.mean(accs)
    logger("Test {}/{}, loss: {:.3f}, acc: {:.3f}".format(
        self.model.global_step_tensor.eval(self.sess),
        self.model.cur_epoch_tensor.eval(self.sess), loss, acc))
    summary = tf.summary.Summary(value=[
        tf.summary.Summary.Value(tag="test/loss", simple_value=loss),
        tf.summary.Summary.Value(tag="test/acc", simple_value=acc)
    ])
    self.test_writer.add_summary(
        summary,
        global_step=self.model.global_step_tensor.eval(self.sess))
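# Hedged sketch (not part of the original file): one way the test_iterator /
# test_next pair used above could be built with the TF1 tf.data API. The
# attribute names (self.test_iterator, self.test_next) follow their usage in
# test(); build_test_pipeline, x_test, y_test and self.config.batch_size are
# assumptions for illustration only.
def build_test_pipeline(self, x_test, y_test):
    dataset = tf.data.Dataset.from_tensor_slices((x_test, y_test))
    dataset = dataset.batch(self.config.batch_size)
    # An initializable iterator lets test() re-run the whole test set each
    # time by calling sess.run(self.test_iterator.initializer).
    self.test_iterator = dataset.make_initializable_iterator()
    self.test_next = self.test_iterator.get_next()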
def train_epoch(self):
    losses = []
    accs = []
    for i in range(self.config.steps_per_epoch):
        loss, acc = self.train_step()
        losses.append(loss)
        accs.append(acc)
    loss = np.mean(losses)
    acc = np.mean(accs)
    logger("Train {}/{}, loss: {:.3f}, acc: {:.3f}".format(
        self.model.global_step_tensor.eval(self.sess),
        self.model.cur_epoch_tensor.eval(self.sess), loss, acc))
    summary = tf.summary.Summary(value=[
        tf.summary.Summary.Value(tag="train/loss", simple_value=loss),
        tf.summary.Summary.Value(tag="train/acc", simple_value=acc)
    ])
    self.train_writer.add_summary(
        summary,
        global_step=self.model.global_step_tensor.eval(self.sess))
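# Hedged sketch (not part of the original file): a minimal train() driver that
# ties train_epoch() and test() together. self.config, self.model and
# cur_epoch_tensor follow attributes already used above; num_epochs and the
# increment_cur_epoch_tensor op are assumptions about the config/model classes,
# shown only so the loop reads end to end.
def train(self):
    start_epoch = self.model.cur_epoch_tensor.eval(self.sess)
    for _ in range(start_epoch, self.config.num_epochs):
        self.train_epoch()                                    # one epoch of train steps
        self.test()                                           # full pass over the test set
        self.sess.run(self.model.increment_cur_epoch_tensor)  # hypothetical epoch-counter op
        self.model.save(self.sess)                            # checkpoint (see save() below)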
def test(self):
    TXT = 'train/testImageList.txt'
    template = '''################## Summary #####################
Test Number: %d
Time Consume: %.03f s
FPS: %.03f
LEVEL - %d
Mean Error:
    Left Eye    = %f
    Right Eye   = %f
    Nose        = %f
    Left Mouth  = %f
    Right Mouth = %f
Failure:
    Left Eye    = %f
    Right Eye   = %f
    Nose        = %f
    Left Mouth  = %f
    Right Mouth = %f
'''
    t = time.clock()
    # Restore variables from disk.
    try:
        saver = tf.train.Saver()
        ckpt = os.path.join(self.logdir, 'model.ckpt')
        saver.restore(self.sess, ckpt)
        print("Model restored.")
    except ValueError:
        print("Model not in model.ckpt")
        return

    data = getDataFromTxt(TXT)
    error = np.zeros((len(data), 5))
    for i in range(len(data)):
        imgPath, bbox, landmarkGt, _, _, _, _ = data[i]
        print(imgPath)
        img = cv2.imread(imgPath, cv2.IMREAD_GRAYSCALE)
        assert img is not None
        logger("process %s" % imgPath)
        # Crop the face region with a small margin around the bounding box.
        f_bbox = bbox.subBBox(-0.05, 1.05, -0.05, 1.05)
        f_face = img[int(f_bbox.top):int(f_bbox.bottom) + 1,
                     int(f_bbox.left):int(f_bbox.right) + 1]
        # Resize and normalize to the 39x39 network input.
        f_face = cv2.resize(f_face, (39, 39))
        f_face = f_face.reshape((39, 39, 1))
        f_face = f_face / 255.0
        landmarkP = self.sess.run(self.yhat_1, feed_dict={self.x: [f_face]})
        landmarkP = landmarkP.reshape((5, 2))
        # Reproject normalized landmarks to real image coordinates.
        landmarkP = bbox.reprojectLandmark(landmarkP)
        landmarkGt = bbox.reprojectLandmark(landmarkGt)
        error[i] = evaluateError(landmarkGt, landmarkP, bbox)

    t = time.clock() - t
    N = len(error)
    fps = N / t
    errorMean = error.mean(0)
    # Failure rate: fraction of samples whose error exceeds the threshold.
    failure = np.zeros(5)
    threshold = 0.05
    for i in range(5):
        failure[i] = float(sum(error[:, i] > threshold)) / N
    # Log string
    s = template % (N, t, fps, 0,
                    errorMean[0], errorMean[1], errorMean[2],
                    errorMean[3], errorMean[4],
                    failure[0], failure[1], failure[2],
                    failure[3], failure[4])
    print(s)

    name = 'fl_' + str(self.attribute) + '_crossstitch'
    logfile = 'log/' + name + '.log'
    with open(logfile, 'w') as fd:
        fd.write(s)
        fd.write('\n')
        alphas = [
            var for var in tf.global_variables()
            if 'alpha' in var.name and 'Adam' not in var.name
        ]
        a_ws = [
            var for var in tf.global_variables()
            if 'a_w' in var.name and 'Adam' not in var.name
        ]
        b_ws = [
            var for var in tf.global_variables()
            if 'b_w' in var.name and 'Adam' not in var.name
        ]
        for alpha in alphas:
            fd.write(alpha.name + ": " +
                     str(self.sess.run(tf.reduce_mean(alpha))) + '\n')
        for a_w, b_w in zip(a_ws, b_ws):
            fd.write(a_w.name + ": " +
                     str(self.sess.run(tf.reduce_mean(a_w))) + '\n')
            fd.write(b_w.name + ": " +
                     str(self.sess.run(tf.reduce_mean(b_w))) + '\n')
    # Plot error hist
    plotError(error, name)
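# Hedged sketch (not part of the original file): the 'alpha' variables whose
# means are logged above come from cross-stitch units (Misra et al., 2016),
# which learn a linear mixing of two tasks' activations. This is a generic TF1
# illustration of such a unit; the actual variable shapes in this repository,
# and the meaning of 'a_w'/'b_w', may differ.
def cross_stitch(x_a, x_b, name='cross_stitch'):
    with tf.variable_scope(name):
        # 2x2 mixing matrix, initialized near identity so each task starts
        # out relying mostly on its own activations.
        alpha = tf.get_variable(
            'alpha', shape=[2, 2],
            initializer=tf.constant_initializer([[0.9, 0.1], [0.1, 0.9]]))
        out_a = alpha[0, 0] * x_a + alpha[0, 1] * x_b
        out_b = alpha[1, 0] * x_a + alpha[1, 1] * x_b
    return out_a, out_b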
def test(self):
    template = '''################## Summary #####################
Test Number: %d
Time Consume: %.03f s
FPS: %.03f
LEVEL - %d
Mean Error:
    Left Eye    = %f
    Right Eye   = %f
    Nose        = %f
    Left Mouth  = %f
    Right Mouth = %f
Failure:
    Left Eye    = %f
    Right Eye   = %f
    Nose        = %f
    Left Mouth  = %f
    Right Mouth = %f
'''
    t = time.clock()
    # Restore variables from disk.
    try:
        saver = tf.train.Saver()
        ckpt = os.path.join(self.logdir, 'model.ckpt')
        saver.restore(self.sess, ckpt)
        print("Model restored.")
    except ValueError:
        print("Model not in model.ckpt")
        return

    TXT = 'train/testImageList.txt'
    data = getDataFromTxt(TXT)
    error = np.zeros((len(data), 5))
    for i in range(len(data)):
        imgPath, bbox, landmarkGt, _, _, _, _ = data[i]
        img = cv2.imread(imgPath, cv2.IMREAD_GRAYSCALE)
        assert img is not None
        logger("process %s" % imgPath)
        # landmarkP = P(img, bbox)
        # Crop the face region with a small margin around the bounding box.
        f_bbox = bbox.subBBox(-0.05, 1.05, -0.05, 1.05)
        f_face = img[int(f_bbox.top):int(f_bbox.bottom) + 1,
                     int(f_bbox.left):int(f_bbox.right) + 1]
        # Resize and normalize to the 39x39 network input.
        f_face = cv2.resize(f_face, (39, 39))
        f_face = f_face.reshape((39, 39, 1))
        f_face = f_face / 255.0
        landmarkP = self.sess.run(self.yhat, feed_dict={self.x: [f_face]})
        landmarkP = landmarkP.reshape((5, 2))
        # Reproject normalized landmarks to real image coordinates.
        landmarkP = bbox.reprojectLandmark(landmarkP)
        landmarkGt = bbox.reprojectLandmark(landmarkGt)
        error[i] = evaluateError(landmarkGt, landmarkP, bbox)

    t = time.clock() - t
    N = len(error)
    fps = N / t
    errorMean = error.mean(0)
    print(error)
    print(errorMean)
    # Failure rate: fraction of samples whose error exceeds the threshold.
    failure = np.zeros(5)
    threshold = 0.05
    for i in range(5):
        failure[i] = float(sum(error[:, i] > threshold)) / N
    # Log string
    s = template % (N, t, fps, 0,
                    errorMean[0], errorMean[1], errorMean[2],
                    errorMean[3], errorMean[4],
                    failure[0], failure[1], failure[2],
                    failure[3], failure[4])
    print(s)

    logfile = 'log/regression.log'
    with open(logfile, 'w') as fd:
        fd.write(s)
    # Plot error hist
    plotError(error, 'regression')
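# Hedged sketch (not part of the original file): the mean-error and failure-rate
# bookkeeping in the two test() methods above can be expressed with vectorized
# NumPy over the same (N, 5) error array; this matches the explicit loops.
def summarize_error(error, threshold=0.05):
    error = np.asarray(error)                     # shape (N, 5), one column per landmark
    errorMean = error.mean(axis=0)                # mean normalized error per landmark
    failure = (error > threshold).mean(axis=0)    # fraction of samples over the threshold
    return errorMean, failure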
def load(self, sess):
    latest_checkpoint = tf.train.latest_checkpoint(self.config.ck_dir)
    if latest_checkpoint:
        logger("Loading model ...")
        self.saver.restore(sess, latest_checkpoint)
        logger("Model loaded.")
def save(self, sess):
    logger("Saving model ...")
    self.saver.save(sess,
                    self.config.ck_dir + "model",
                    global_step=self.global_step_tensor)
    logger("Model saved.")
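# Hedged sketch (not part of the original file): how load()/save() above are
# typically wired up. Only the self.saver / self.global_step_tensor attribute
# names and the config.ck_dir checkpoint directory come from this file; the
# rest of the initialization is an assumption for illustration.
def init_saver_and_restore(self, sess):
    self.global_step_tensor = tf.Variable(0, trainable=False, name='global_step')
    self.saver = tf.train.Saver(max_to_keep=5)
    sess.run(tf.global_variables_initializer())
    self.load(sess)  # resumes from the latest checkpoint in config.ck_dir, if one exists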