Example #1
# NOTE: the source snippet begins mid-call; `HourglassModel` below is an
# assumed placeholder name for the constructor line that was cut off.
model = HourglassModel(name=params['name'],
                       data_stream_train=input_data,
                       data_stream_valid=valid_data,
                       data_stream_test=None,
                       logdir_train=params['log_dir_train'],
                       logdir_test=params['log_dir_test'],
                       saver_directory=params['saver_directory'],
                       tiny=params['tiny'],
                       w_loss=params['weighted_loss'],
                       w_summary=True,
                       joints=params['joint_list'],
                       modif=False)
model.generate_model()
load_file = None
model.training_init(nEpochs=params['nepochs'],
                    epochSize=epochSize,
                    saveStep=summary_steps,
                    load=load_file)

heatmaps = model.get_heatmaps(load=load_file)
print("Output heatmaps result. Shape: " + str(heatmaps.shape))

df_file_names = valid_data.df[valid_data.file_name_col]
gt_coords = valid_data.df[valid_data.coords_cols].values  # .as_matrix() was removed in pandas 1.0
lm_cnt = valid_data.lm_cnt

pred_coord = heatmap_to_coord(heatmaps, valid_data.img_width,
                              valid_data.img_height)

write_point_result(pred_coord, gt_coords, lm_cnt, params,
                   params['valid_result_dir'])
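
Every example on this page decodes the network's output heatmaps into landmark
coordinates with heatmap_to_coord, whose definition is not part of these
snippets. The sketch below is a minimal assumed implementation: a per-channel
argmax rescaled from heatmap resolution to image resolution. The
(batch, height, width, n_points) input layout and the interleaved
x0, y0, x1, y1, ... output layout (matching the 2 * lm_cnt columns used
elsewhere) are assumptions, not the project's actual code.

import numpy as np

def heatmap_to_coord(heatmaps, img_width, img_height):
    # Assumed decoder: take the argmax of each point's heatmap and
    # rescale it from heatmap resolution to image resolution.
    batch, h, w, n_points = heatmaps.shape
    coords = np.zeros((batch, 2 * n_points))
    for b in range(batch):
        for p in range(n_points):
            y, x = np.unravel_index(np.argmax(heatmaps[b, :, :, p]), (h, w))
            coords[b, 2 * p] = x * img_width / w       # x in image pixels
            coords[b, 2 * p + 1] = y * img_height / h  # y in image pixels
    return coords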
Example #2
        if 'save_heatmaps' in params and params['save_heatmaps']:
            save_heatmaps(dir=params['heatmap_dir'],
                          heatmaps=predict_mini[0, ...],
                          file_name=pred_names[start_id],
                          pt_names=pred_data.points_names)

        pred_coord_mini = heatmap_to_coord(predict_mini, pred_data.img_width,
                                           pred_data.img_height)

        pred_coords = np.vstack((pred_coords, pred_coord_mini))
        # Progress indicator for long prediction runs.
        if start_id % 1000 == 0:
            print(start_id, "steps")
    pred_coords = pred_coords[:pred_data.df_size, ...]
    pred_df = write_pred_dataframe(pred_data,
                                   pred_coords,
                                   folder=params['pred_result_dir'] + "pred/",
                                   file_name=str(date.today()) +
                                   params.get("result_name", "result"),
                                   file_col_name=params['file_col'],
                                   patches_coord=None,
                                   write_index=False,
                                   is_valid=is_valid)
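
Example #2 ends by persisting the decoded coordinates with
write_pred_dataframe, which is also defined outside these snippets. Below is a
minimal sketch under stated assumptions: one row per image, the file-name
column first, then interleaved x/y coordinate columns, written to CSV. The
column names and the pass-through handling of patches_coord and is_valid
(ignored here) are guesses for illustration only.

import os
import pandas as pd

def write_pred_dataframe(data, pred_coords, folder, file_name,
                         file_col_name, patches_coord=None,
                         write_index=False, is_valid=False):
    # Assumed writer: file names plus interleaved x/y coordinate columns.
    os.makedirs(folder, exist_ok=True)
    pred_df = pd.DataFrame({file_col_name: data.df[file_col_name].values})
    for p in range(pred_coords.shape[1] // 2):
        pred_df['x_{}'.format(p)] = pred_coords[:, 2 * p]
        pred_df['y_{}'.format(p)] = pred_coords[:, 2 * p + 1]
    pred_df.to_csv(os.path.join(folder, file_name + '.csv'), index=write_index)
    return pred_df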
Example #3
def training(params, train_data, valid_data):

    config_name = params['config_name']

    # Calculate the training steps.
    train_data_size = train_data.df_size
    one_epoch_steps = train_data_size // params['batch_size']
    params["one_epoch_steps"] = one_epoch_steps
    total_steps = (params['nepochs'] * train_data_size) // params['batch_size']
    summary_steps = total_steps // params['summary_interval']
    valid_steps = total_steps // params['valid_interval']
    saver_steps = total_steps // params['saver_interval']
    print('Total steps: {}\nOne epoch: {}\nSummary steps: {}, Valid steps: {}, Save steps: {}'.format(
        total_steps, one_epoch_steps, summary_steps, valid_steps, saver_steps))

    tf.reset_default_graph()
    model = network.Pose_Estimation(params, train_data.img_width, train_data.img_height)

    network_to_use = getattr(model, params['network_name'])
    predict = network_to_use()
    loss = model.loss()
    train_op = model.train_op(loss, model.global_step)

    # File names and paths.
    param_dir = params['saver_directory']
    logdir = os.path.join(params['log_dir'], config_name)
    restore_file = params['restore_param_file']
    save_filename = config_name
    initialize = params['init']

    saver = tf.train.Saver()
    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
        if not os.path.exists(param_dir):
            os.makedirs(param_dir)
        if not os.listdir(param_dir) or initialize:
            print("Initializing network")
            sess.run(init_op)
        else:
            print("Restoring parameters from: {}".format(restore_file))
            sess.run(init_op)
            saver.restore(sess, restore_file)

        #### Get the training and weight summaries.
        train_summary = tf.summary.merge_all('train')
        # weight_summary = tf.summary.merge_all('weight')
        writer = tf.summary.FileWriter(logdir, sess.graph)

        for i in range(total_steps):
            ####### Training part ########
            # Randomly draw a mini-batch of images and labels from the training set.
            tmp_global_step = model.global_step.eval()
            img_mini, heatmap_mini, coords_mini, vis_mini = train_data.get_next_batch()
            feed_dict = {
                model.images: img_mini,
                model.labels: heatmap_mini,
                model.vis_mask: vis_mini
            }
            sess.run(train_op, feed_dict=feed_dict)

            ###### Training summary part #####
            if (i + 1) % summary_steps == 0 or i == 0:
                print("{} steps Loss: {}".format(i + 1, sess.run(loss, feed_dict=feed_dict)))
                temp_summary = sess.run(train_summary, feed_dict=feed_dict)
                writer.add_summary(temp_summary, tmp_global_step)
            ###### Validation part #####
            if (i + 1) % valid_steps == 0 or i == 0:
                # Run the whole validation set in mini-batches and write the result.
                loss_list = np.array([])
                pred_coords = np.zeros((0, 2 * model.points_num))
                for i_df_valid in np.arange(0, valid_data.df.shape[0], valid_data.batch_size):
                    img_mini, heatmap_mini, coords_mini, vis_mini = valid_data.get_next_batch_no_random()
                    feed_dict = {
                        model.images: img_mini,
                        model.labels: heatmap_mini,
                        model.vis_mask: vis_mini
                    }
                    _loss, _prediction_mini = sess.run([loss, predict], feed_dict=feed_dict)
                    loss_list = np.append(loss_list, _loss)

                    pred_coord_mini = heatmap_to_coord(_prediction_mini, valid_data.img_width,
                                                       valid_data.img_height)
                    pred_coords = np.vstack((pred_coords, pred_coord_mini))
                pred_coords = pred_coords[:valid_data.df_size, ...]
                gt_coords = valid_data.df[valid_data.coords_cols].values

                diff_per_pt, pck = pck_accuracy(pred_coords, gt_coords,
                                                lm_cnt=valid_data.lm_cnt,
                                                pck_threshold=params['pck_threshold'],
                                                scale=1)
                ave_diff = np.nanmean(diff_per_pt)
                summary = sess.run(model.valid_summary,
                                   feed_dict={model.point_acc: diff_per_pt,
                                              model.valid_loss: np.mean(loss_list),
                                              model.ave_pts_diff: ave_diff})
                writer.add_summary(summary, tmp_global_step)
            ####### Save the parameters periodically.
            if (i + 1) % saver_steps == 0:
                tmp_global_step = model.global_step.eval()
                epochs = (tmp_global_step * params["batch_size"]) // train_data_size
                model.save(sess, saver, save_filename, epochs)

        params['restore_param_file'] = "{}-{}".format(save_filename, epochs)
    return model, predict
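
A call to this function might look like the sketch below. Every key in the
dict is one the function reads, but all values, and the 'hourglass' method
name, are illustrative placeholders rather than the project's real
configuration.

# Hypothetical configuration; every value is a placeholder.
params = {
    'config_name': 'hg_baseline',
    'batch_size': 8,
    'nepochs': 50,
    'summary_interval': 100,      # number of summary writes over the run
    'valid_interval': 20,         # number of validation passes over the run
    'saver_interval': 10,         # number of checkpoints over the run
    'network_name': 'hourglass',  # must name a method of network.Pose_Estimation
    'log_dir': './logs/',
    'saver_directory': './checkpoints/',
    'restore_param_file': '',
    'init': True,                 # initialize instead of restoring
    'pck_threshold': 10,
}
model, predict = training(params, train_data, valid_data)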
Example #4
def get_and_eval_result(params, valid_data):
    params_valid = params.copy()
    params_valid['is_train'] = False
    params_valid['l2'] = 0.0

    tf.reset_default_graph()
    model = network.Pose_Estimation(params_valid,
                                    valid_data.img_width, valid_data.img_height)

    network_to_use = getattr(model, params_valid['network_name'])
    predict = network_to_use()

    saver = tf.train.Saver()
    init_op = tf.global_variables_initializer()
    # Get the predictions:
    with tf.Session() as sess:
        sess.run(init_op)
        saver.restore(sess, params_valid['saver_directory'] + params_valid['restore_param_file'])

        pred_coords = np.zeros((0, 2 * valid_data.lm_cnt))
        for i_df_valid in np.arange(0, valid_data.df.shape[0], valid_data.batch_size):
            img_mini, heatmap_mini, coords_mini, vis_mini = valid_data.get_next_batch_no_random()
            feed_dict = {
                model.images: img_mini,
                model.labels: heatmap_mini,
                model.vis_mask: vis_mini
            }
            _prediction_mini = sess.run(predict, feed_dict=feed_dict)

            pred_coord_mini = heatmap_to_coord(_prediction_mini, valid_data.img_width,
                                               valid_data.img_height)
            pred_coords = np.vstack((pred_coords, pred_coord_mini))

        pred_coords = pred_coords[:valid_data.df_size, ...]

    gt_coords = valid_data.df[valid_data.coords_cols].values

    ## Evaluate accuracy: per-point pixel error plus PCK at several thresholds.
    diff_per_pt, pck = pck_accuracy(pred_coords, gt_coords,
                                    lm_cnt=valid_data.lm_cnt,
                                    pck_threshold=params_valid['pck_threshold'],
                                    scale=1)

    _, pck_50 = pck_accuracy(pred_coords, gt_coords,
                             lm_cnt=valid_data.lm_cnt, pck_threshold=50, scale=1)
    _, pck_150 = pck_accuracy(pred_coords, gt_coords,
                              lm_cnt=valid_data.lm_cnt, pck_threshold=150, scale=1)
    _, pck_200 = pck_accuracy(pred_coords, gt_coords,
                              lm_cnt=valid_data.lm_cnt, pck_threshold=200, scale=1)
    _, pck_300 = pck_accuracy(pred_coords, gt_coords,
                              lm_cnt=valid_data.lm_cnt, pck_threshold=300, scale=1)

    write_pred_dataframe(valid_data, pred_coords,
                         folder=params_valid['valid_result_dir'] + "grid_temp/",
                         file_name=params['config_name'],
                         file_col_name=params['file_col'],
                         patches_coord=None, write_index=False)

    result_dict = build_result_dict(result_dict=params_valid,
                                    pck=np.round(pck, 4),
                                    mean_pck=round(np.nanmean(pck), 4),
                                    pck_threshold=params_valid['pck_threshold'],
                                    diff_per_pt=np.round(diff_per_pt, 4),
                                    mean_diff_per_pt=round(np.nanmean(diff_per_pt), 4),
                                    pck_50=pck_50, pck_150=pck_150,
                                    pck_200=pck_200, pck_300=pck_300)
    result_dict['result_names'] = params['config_name'] + ".csv"

    return result_dict, pred_coords
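
pck_accuracy, used by Examples #3 to #5, is likewise external to these
snippets. The sketch below assumes the standard PCK (Percentage of Correct
Keypoints) metric: a landmark counts as correct when its Euclidean distance to
the ground truth, divided by scale, falls below pck_threshold, with NaN
entries (unlabelled points) excluded. The interleaved x/y layout mirrors the
arrays built above, but the project's actual implementation may differ.

import numpy as np

def pck_accuracy(pred_coords, gt_coords, lm_cnt, pck_threshold, scale=1):
    # Assumed metric: per-landmark mean pixel error and PCK hit rate.
    diff_per_pt = np.zeros(lm_cnt)
    pck = np.zeros(lm_cnt)
    for p in range(lm_cnt):
        dx = pred_coords[:, 2 * p] - gt_coords[:, 2 * p]
        dy = pred_coords[:, 2 * p + 1] - gt_coords[:, 2 * p + 1]
        dist = np.sqrt(dx ** 2 + dy ** 2) / scale
        diff_per_pt[p] = np.nanmean(dist)  # NaN-safe mean pixel error
        valid = ~np.isnan(dist)
        pck[p] = np.mean(dist[valid] < pck_threshold) if valid.any() else np.nan
    return diff_per_pt, pck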
Example #5
                    pred_coords = np.zeros((0, 2 * model.points_num))
                    for i_df_valid in np.arange(0, valid_data.df.shape[0],
                                                valid_data.batch_size):
                        img_mini, heatmap_mini, coords_mini, vis_mini = \
                            valid_data.get_next_batch_no_random()
                        feed_dict = {
                            model.images: img_mini,
                            model.labels: heatmap_mini,
                            model.vis_mask: vis_mini
                        }
                        _loss, _prediction_mini = sess.run([loss, predict],
                                                           feed_dict=feed_dict)
                        loss_list = np.append(loss_list, _loss)

                        pred_coord_mini = heatmap_to_coord(
                            _prediction_mini, valid_data.img_width,
                            valid_data.img_height)
                        pred_coords = np.vstack((pred_coords, pred_coord_mini))
                    pred_coords = pred_coords[:valid_data.df_size, ...]
                    gt_coords = valid_data.df[valid_data.coords_cols].values

                    diff_per_pt, pck = pck_accuracy(
                        pred_coords,
                        gt_coords,
                        lm_cnt=valid_data.lm_cnt,
                        pck_threshold=params['pck_threshold'],
                        scale=1)

                    # The source snippet is cut off mid-call here; the call is
                    # completed to mirror the identical block in Example #3.
                    ave_diff = np.nanmean(diff_per_pt)
                    summary = sess.run(model.valid_summary,
                                       feed_dict={model.point_acc: diff_per_pt,
                                                  model.valid_loss: np.mean(loss_list),
                                                  model.ave_pts_diff: ave_diff})