# NOTE(review): this chunk opens mid-function — the statements down to the
# 'Meanerror' print look like the interior of an AUCError routine (a full
# copy appears later in this file); `errors`, `failureThreshold` and `step`
# come from the enclosing scope outside this view.  TODO: confirm against
# the unseen `def` header.
nErrors = len(errors)
xAxis = list(np.arange(0., failureThreshold + step, step))
# Cumulative error distribution: fraction of samples with error <= x.
ced = [float(np.count_nonzero([errors <= x])) / nErrors for x in xAxis]
# Normalize so a perfect CED (all ones) yields AUC == 1.
AUC = simps(ced, x=xAxis) / failureThreshold
failureRate = 1. - ced[-1]
print('AUC @ %.3f : %.3f' % (failureThreshold, AUC))
print('Failure rate: %.3f' % (failureRate))
print('Meanerror : %.3f' % (np.mean(errors)))

# ---- training script setup (TensorFlow 1.x graph mode) ----
tfrecordsTrain = 'data/train.tfrecords'
tfrecordsTest = 'data/validation.tfrecords'

# Mean initial landmark shape, frozen into the graph as a constant.
initmarks = np.load('data/initlandmark.npy')
initmarks = tf.constant(initmarks, dtype=tf.float32)

imgs_batch, landmarks_batch = loadTfrecords(tfrecordsTrain, batch_size)
#imgs_test, landmarks_test = loadTfrecords(tfrecordsTest ,batch_size)

# 112x112 single-channel input; 136 targets (presumably 68 landmarks x 2
# coordinates — confirm against the dataset writer).
x = tf.placeholder(dtype=tf.float32, shape=(None, 112, 112, 1))
y = tf.placeholder(dtype=tf.float32, shape=(None, 136))
training = FaceAlignmnetTraining(initmarks, batch_size, STAGE)
dan = training.createCNN(x, y, STAGE)
saver = tf.train.Saver()
#print(model.shape)
#tf.reset_default_graph()
with tf.Session() as sess:
    # BUG FIX: time.clock() was removed in Python 3.8; perf_counter() is the
    # documented replacement for elapsed-time measurement.
    curTime = time.perf_counter()
    # Rebind the saver to stage-1 variables only, replacing the full Saver
    # built above, so restore() touches just the 'S1' scope.
    saver = tf.train.Saver(
        var_list=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='S1'))
    sess.run(tf.global_variables_initializer())
    saver.restore(sess, './model/S1_model_iter-0')
    print("Starting training......")
# NOTE(review): chunk opens mid-function — the two statements below are the
# tail of an unseen landmark-error helper; `gtLandmarks`, `resLandmarks` and
# `normDist` come from the enclosing def outside this view.  The expression
# is a mean point-to-point distance divided by a normalizing distance.
    error = np.mean(np.sqrt(np.sum((gtLandmarks - resLandmarks)**2, axis=1))) / normDist
    return error


def AUCError(errors, failureThreshold, step=0.0001, showCurve=False):
    """Print the AUC of the cumulative error distribution and the failure rate.

    errors: per-sample normalized errors.
    failureThreshold: upper bound of the CED x-range; also normalizes the AUC.
    step: sampling step along the x-axis.
    showCurve: accepted but unused in this chunk.
    """
    nErrors = len(errors)
    xAxis = list(np.arange(0., failureThreshold + step, step))
    # Fraction of samples whose error does not exceed x, for each x.
    ced = [float(np.count_nonzero([errors <= x])) / nErrors for x in xAxis]
    # Normalize so a perfect CED (all ones) yields AUC == 1.
    AUC = simps(ced, x=xAxis) / failureThreshold
    # Failure rate: share of samples whose error exceeds the threshold.
    failureRate = 1. - ced[-1]
    print('AUC @ %.3f : %.3f' % (failureThreshold, AUC))
    print('Failure rate: %.3f' % (failureRate))


# ---- evaluation script (TensorFlow 1.x graph mode) ----
# `tfrecordsPath`, `batch_size`, `initmarks`, `STAGE`, `l2_scale`, `testSet`,
# `meanImg`, `stdDevImg` and `outputsnap` are defined elsewhere in this file.
imgs_batch, landmarks_batch = loadTfrecords(tfrecordsPath, batch_size)
# 112x112 single-channel input; 136 targets (presumably 68 landmarks x 2
# coordinates — confirm against the dataset writer).
x = tf.placeholder(dtype=tf.float32, shape=(None, 112, 112, 1))
y = tf.placeholder(dtype=tf.float32, shape=(None, 136))
training = FaceAlignmnetTraining(initmarks, batch_size, STAGE)
dan = training.createCNN(x, y, l2_scale, STAGE)
saver = tf.train.Saver()
with tf.Session() as sess:
    dataset = np.load(testSet)
    # Standardize images with precomputed dataset statistics.
    dataImgs = (dataset["imgs"] - meanImg) / stdDevImg
    #print(dataImgs.shape)
    dataLandmarks = dataset["gtLandmarks"]
    sess.run(tf.global_variables_initializer())
    saver.restore(sess, outputsnap)
    errors = []