Example #1
import numpy as np
import tensorflow as tf

# Start TF; cap this process at 40% of the GPU memory so the device can be shared
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.4)
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
sess.run(tf.global_variables_initializer())
tf.train.start_queue_runners(sess=sess)
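# Note (an addition, not in the original snippet): a tf.train.Coordinator lets
# the queue-runner threads be stopped cleanly once evaluation is done:
#   coord = tf.train.Coordinator()
#   threads = tf.train.start_queue_runners(sess=sess, coord=coord)
#   ...
#   coord.request_stop()
#   coord.join(threads)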

# initialize network weights
if USE_RETRAINED:
    # retrained version
    last_cpt = tf.train.latest_checkpoint(PATH_TO_SNAPSHOTS)
    assert last_cpt is not None, "Could not locate snapshot to load. Did you already train the network and set the path accordingly?"
    load_weights_from_snapshot(sess, last_cpt, discard_list=['Adam', 'global_step', 'beta'])
    print('loading weights from {}'.format(last_cpt))
else:
    # load weights used in the paper
    net.init('./weights/pose_model.npy', sess)
    # net.init_pickle(sess, ['./snapshots_cpm_rotate_s10_wrist_dome/model-100000.pickle'], ['scale'])

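# Sketch (an assumption; the helper's source is not shown here):
# load_weights_from_snapshot can be built from a tf.train.Saver restricted to
# the variables whose names contain none of the discarded substrings:
#   keep = [v for v in tf.global_variables()
#           if not any(s in v.name for s in ('Adam', 'global_step', 'beta'))]
#   tf.train.Saver(var_list=keep).restore(sess, last_cpt)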
util = EvalUtil()
# iterate dataset

results = []
for i in range(dataset.num_samples):
    # get prediction
    # crop_scale, keypoints_scoremap_v, kp_uv21_gt, kp_vis, image_crop, crop_center, img_dir, hand_side, head_size \
    #     = sess.run([data['crop_scale'], keypoints_scoremap, data['keypoint_uv21'], data['keypoint_vis21'], data['image_crop'], data['crop_center'], data['img_dir'], data['hand_side'], data['head_size']])
    crop_scale, keypoints_scoremap_v, kp_uv21_gt, kp_vis, image_crop, crop_center, img_dir, hand_side \
        = sess.run([data['crop_scale'], keypoints_scoremap, data['keypoint_uv21'], data['keypoint_vis21'], data['image_crop'], data['crop_center'], data['img_dir'], data['hand_side']])

    keypoints_scoremap_v = np.squeeze(keypoints_scoremap_v)
    kp_uv21_gt = np.squeeze(kp_uv21_gt)
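A step this snippet stops short of is turning the score maps into 2-D keypoint predictions. The helper below is a minimal sketch, assuming keypoints_scoremap_v has shape [H, W, 21]; the name detect_keypoints_2d is illustrative and not part of the original code:

def detect_keypoints_2d(scoremaps):
    # return the (u, v) position of the maximum of each score map
    h, w, num_kp = scoremaps.shape
    keypoints_uv = np.zeros((num_kp, 2))
    for kp in range(num_kp):
        v, u = np.unravel_index(np.argmax(scoremaps[:, :, kp]), (h, w))
        keypoints_uv[kp] = (u, v)
    return keypoints_uv
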
Example #2
import numpy as np
import tensorflow as tf

image_crop = data['image_crop']
image_crop = image_crop[:, :, ::-1, ::-1]  # flip horizontally and reverse channels (RGB -> BGR)
# build network
net = CPM(out_chan=22)

# feed through network
scoremap, _ = net.inference(image_crop)[-1]  # keep only the final stage's score maps

# Start TF
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.3)
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
sess.run(tf.global_variables_initializer())
tf.train.start_queue_runners(sess=sess)

weight_path = './weights/pose_model.npy'
net.init(weight_path, sess)
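# Assumption about net.init (its source is not shown here): pose_model.npy
# presumably stores a {layer_name: weights} dict, loaded roughly as
#   data_dict = np.load(weight_path, encoding='latin1').item()
# with each array assigned to the matching layer's variables.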

util = EvalUtil()
# iterate dataset
for i in range(dataset.num_samples):
    # get prediction
    keypoint_xyz21, keypoint_vis21, keypoint_scale, keypoint_uv21_v, image_crop_v, scoremap_v = \
        sess.run([data['keypoint_xyz21'], data['keypoint_vis21'], data['keypoint_scale'], data['keypoint_uv21'], image_crop, scoremap])

    keypoint_xyz21 = np.squeeze(keypoint_xyz21)
    keypoint_vis21 = np.squeeze(keypoint_vis21)
    keypoint_scale = np.squeeze(keypoint_scale)
    keypoint_uv21_v = np.squeeze(keypoint_uv21_v)
    image_crop_v = np.squeeze((image_crop_v + 0.5) * 256).astype(np.uint8)  # undo the [-0.5, 0.5] normalization
    scoremap_v = np.squeeze(scoremap_v)
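    # Hedged sketch of the usual next step (not in the original): feed ground
    # truth and a prediction into the evaluation helper, e.g. with a
    # hypothetical keypoint_xyz21_pred:
    #   util.feed(keypoint_xyz21, keypoint_vis21, keypoint_xyz21_pred)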
    for ik in (1, 5, 9, 13, 17):  # base joint of each finger in the 21-keypoint hand layout
        pass  # the loop body is not included in this snippet

Example #3
    # log the individual training losses for TensorBoard
    tf.summary.scalar('loss', total_loss)
    tf.summary.scalar('loss_PAF', total_loss_PAF)
    tf.summary.scalar('loss_2d', total_loss_2d)

    # init weights
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver(max_to_keep=None)

    merged = tf.summary.merge_all()
    train_writer = tf.summary.FileWriter(train_para['snapshot_dir'] + '/train',
                                         sess.graph)

    if not fine_tune:
        start_iter = 0
        if net.name == 'CPM':
            net.init('./weights/openpose_body_3DPAF_randomz_headtop_chest.npy',
                     sess)
            # net.init('./weights/openpose_body_expanded_PAF.npy', sess)
        elif net.name == 'Hourglass':
            from tensorflow.contrib.framework import assign_from_values_fn
            import pickle
            with open('weights/Hourglass_weights_processed.pkl', 'rb') as f:
                hg_data = pickle.load(f)
            # translate each graph variable name into the key format used in the
            # pickle file, then collect the weight values that have a match
            map_trainable_variables = {
                i.name.replace('hourglass', 'my_model').replace(':0', ''): i.name
                for i in tf.trainable_variables()
            }
            dic = dict()
            for i, j in map_trainable_variables.items():
                if i in hg_data:
                    dic[j] = hg_data[i]
            init_fn = assign_from_values_fn(dic)
            init_fn(sess)  # run the assignments to copy the weights into the graph
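The fragment stops before the training loop itself. Below is a minimal sketch of how the pieces above are typically wired together, where train_op, 'max_iter', and 'snapshot_freq' are assumed names rather than values taken from the original:

for it in range(start_iter, train_para['max_iter']):
    _, summary = sess.run([train_op, merged])
    train_writer.add_summary(summary, it)
    # periodically snapshot the model with the Saver created above
    if (it + 1) % train_para['snapshot_freq'] == 0:
        saver.save(sess, train_para['snapshot_dir'] + '/model', global_step=it + 1)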