Example #1
    # Excerpt from a training class: self.cfg, self.train_sequences, self.val_sequence
    # and the frame ranges are instance attributes set up elsewhere in the class.
    def __load_data_set(self):
        tools.printf("Loading training data...")
        self.train_data_gen = data_roller.StatefulRollerDataGen(
            self.cfg, config.dataset_path, self.train_sequences,
            frames=self.train_frames_range)
        tools.printf("Loading validation data...")
        self.val_data_gen = data_roller.StatefulRollerDataGen(
            self.cfg, config.dataset_path, [self.val_sequence],
            frames=self.val_frames_range)
import data_roller
import config

cfg = config.SeqTrainConfigsSmallSteps
data_roller.StatefulRollerDataGen(cfg, "/home/cs4li/Dev/KITTI/dataset/",
                                  ["06"])
# val_merged_summary_op = tf.summary.merge([val_loss_sum, val_fc_sum, val_se3_sum])

val_loss_sum = tf.summary.scalar("training_loss_val", total_losses)
val_fc_sum = tf.summary.scalar("fc_losses_val", fc_losses)
val_se3_sum = tf.summary.scalar("se3_losses_val", se3_losses)
val_z_sum = tf.summary.scalar("z_loss_val", z_loss)

val_merged_summary_op = tf.summary.merge(
    [val_loss_sum, val_fc_sum, val_se3_sum, val_z_sum])
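
For reference, a merged summary op like val_merged_summary_op is normally evaluated with sess.run and written out through a tf.summary.FileWriter. A self-contained TensorFlow 1.x sketch with a dummy placeholder scalar (the names and the log directory are made up for illustration; the real graph would feed total_losses, fc_losses, se3_losses and z_loss instead):

import tensorflow as tf

dummy_loss = tf.placeholder(tf.float32, name="dummy_loss")
loss_sum = tf.summary.scalar("training_loss_val", dummy_loss)
merged = tf.summary.merge([loss_sum])

with tf.Session() as sess:
    writer = tf.summary.FileWriter("/tmp/val_summaries", sess.graph)
    summary = sess.run(merged, feed_dict={dummy_loss: 0.42})
    writer.add_summary(summary, global_step=0)
    writer.close()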

# ================ LOADING DATASET ===================

tools.printf("Loading training data...")
train_sequences = ["00", "01", "02", "08", "09"]
train_data_gen = data.StatefulRollerDataGen(cfg,
                                            config.dataset_path,
                                            train_sequences,
                                            frames=None)
tools.printf("Loading validation data...")
validation_sequences = ["07"]
val_data_gen = data.StatefulRollerDataGen(cfg,
                                          config.dataset_path,
                                          validation_sequences,
                                          frames=[
                                              range(500),
                                          ])
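# Note: frames is parallel to the sequence list; [range(500)] restricts sequence
# "07" to its first 500 frames, while frames=None above uses every frame of each
# training sequence.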


# ============== For Validation =============
def calc_val_loss(sess, writer, i_epoch, alpha_set, run_options, run_metadata):
    curr_lstm_states = np.zeros(
        [2, cfg.lstm_layers, cfg.batch_size, cfg.lstm_size])

# (Fragment) The lines below are the trailing arguments of a model-construction call
# made with the initializer config cfg_si; the opening of that call is cut off in
# this excerpt.
        tf.constant(np.zeros([2, cfg_si.lstm_layers, cfg_si.batch_size, cfg_si.lstm_size]),
                    dtype=tf.float32),
        initial_poses_si,
        imu_data_si,
        np.zeros([cfg_si.batch_size, 17], dtype=np.float32),
        0.01 * np.repeat(np.expand_dims(np.identity(17, dtype=np.float32), axis=0),
                         repeats=cfg_si.batch_size, axis=0),
        tf.constant(0.1, dtype=tf.float32),
        tf.constant(False, dtype=tf.bool),  # is training
        False,
        tf.constant(True, dtype=tf.bool),  # use initializer
        cfg_si.use_ekf)

for kitti_seq in kitti_seqs:
    tools.printf("Loading eval data...")
    data_gen = data.StatefulRollerDataGen(cfg, config.dataset_path, [kitti_seq])
    if cfg_si.use_init:
        tools.printf("Loading eval data for initial LSTM states...")
        data_gen_si = data.StatefulRollerDataGen(cfg_si, config.dataset_path, [kitti_seq],
                                                 frames=[range(0, cfg_si.timesteps + 1)])
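        # Only the first cfg_si.timesteps + 1 frames are loaded here, presumably just
        # enough for one pass of the initializer network that produces the starting
        # LSTM state.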

    # results_dir_path = os.path.join(config.save_path, dir_name)
    results_dir_path = os.path.join(os.path.dirname(restore_model_file), dir_name)
    if not os.path.exists(results_dir_path):
        os.makedirs(results_dir_path)

    # ==== Read Model Checkpoints =====
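    # The scope argument of tf.get_collection is matched as a regular expression;
    # the negative lookahead "^(?!optimizer).*" keeps every global variable whose
    # name does not start with "optimizer", so optimizer slots are excluded from
    # the restore below.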
    variable_to_load = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, "^(?!optimizer).*")
    tf_restore_saver = tf.train.Saver(variable_to_load)

    with tf.Session() as sess:
        pass  # the body of this snippet is cut off in the excerpt
rel_disp = tf.concat(
    [ekf_out_states[1:, :, 0:3], ekf_out_states[1:, :, 11:14]], axis=-1)
rel_covar = tf.concat([
    tf.concat(
        [ekf_out_covar[1:, :, 0:3, 0:3], ekf_out_covar[1:, :, 0:3, 11:14]],
        axis=-1),
    tf.concat(
        [ekf_out_covar[1:, :, 11:14, 0:3], ekf_out_covar[1:, :, 11:14, 11:14]],
        axis=-1)
],
                      axis=-2)
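# rel_disp stacks the state components at indices 0:3 and 11:14 of the 17-D EKF
# state (presumably the translational and rotational parts of the relative
# displacement, since rel_disp is fed to model.se3_layer below), and rel_covar
# assembles the matching 6x6 covariance from the four corresponding sub-blocks.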

se3_outputs = model.se3_layer(rel_disp, initial_poses)

data_gen = data_roller.StatefulRollerDataGen(cfg,
                                             config.dataset_path, [kitti_seq],
                                             frames=frames)

results_dir_path = os.path.join(config.save_path, "ekf_debug")
if not os.path.exists(results_dir_path):
    os.makedirs(results_dir_path)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    total_batches = data_gen.total_batches()
    tools.printf("Start evaluation loop...")

    prediction = np.zeros([total_batches + 1, 7])
    ground_truths = np.zeros([total_batches + 1, 7])
    ekf_states = np.zeros([total_batches + 1, 17])
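    # One row per frame (total_batches batches plus the starting frame): 7 values
    # per pose, presumably translation plus a quaternion, and the full 17-D EKF
    # state per frame.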
# if kitti_seq in ["11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21"]:
#     save_ground_truth = False
# else:
#     save_ground_truth = True

save_ground_truth = True
cfg = config.SeqEvalLidarConfig

tools.printf("Building eval model....")
inputs, lstm_initial_state, initial_poses, \
is_training, fc_outputs, se3_outputs, lstm_states = model.build_seq_model(cfg)

for kitti_seq in kitti_seqs:
    tools.printf("Loading training data...")
    train_data_gen = data.StatefulRollerDataGen(cfg, config.dataset_path,
                                                [kitti_seq])

    results_dir_path = os.path.join(config.save_path, dir_name)
    if not os.path.exists(results_dir_path):
        os.makedirs(results_dir_path)

    # ==== Read Model Checkpoints =====
    restore_model_file = "/home/cs4li/Dev/end_to_end_visual_odometry/results/train_seq_20180418-16-37-02/best_val/model_best_val_checkpoint-143"

    variable_to_load = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                         "^(cnn_layer|rnn_layer|fc_layer).*")
    tf_restore_saver = tf.train.Saver(variable_to_load)

    with tf.Session() as sess:
        tools.printf("Restoring model weights from %s..." % restore_model_file)
        tf_restore_saver.restore(sess, restore_model_file)