def test():

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.gpu_options.visible_device_list = "0"
    with tf.Session(config=config) as sess:

        model = create_model(sess, FLAGS.batch_size)

        list_of_experiments = glob.glob(os.path.join(".", "log_HG2D", "*"))
        p2d_dir = os.path.join(
            max(list_of_experiments, key=os.path.getctime),
            "predictions")  # latest created dir for latest experiment
        list_of_preds = glob.glob(os.path.join(p2d_dir, "*"))

        print("\n")
        for test_fname in list_of_preds:
            test_fname = test_fname.split("/")[-1]
            print("Converting {} to p3d".format(test_fname))
            start_time = time.time()

            encoder_inputs = model.get_test_data(
                os.path.join(p2d_dir, test_fname))
            encoder_inputs = model.input_normalizer.transform(encoder_inputs)

            decoder_outputs = model.test_step(encoder_inputs, sess)

            decoder_outputs = model.output_normalizer.inverse_transform(
                decoder_outputs)

            for i in range(decoder_outputs.shape[0]):
                temp = decoder_outputs[i].reshape(17, 3)
                temp -= temp[0]
                decoder_outputs[i] = temp.reshape(17 * 3)

            print("Done in {} s".format(time.time() - a))

            step_nb = test_fname.split(".")[-2].split("_")[-1]
            out_file = os.path.join(submissions_dir,
                                    "p3d_step_{}.csv.gz".format(step_nb))
            print("saving at: {}".format(out_file))
            print("\n\n")
            utils.generate_submission_3d(decoder_outputs, out_file)

        print("Saving code ...")
        submission_files = [
            "data.py", "hourglass2D_model.py", "linear_model.py",
            "train_hourglass2D.py", "train_simple_baseline.py",
            "test_simple_baseline.py", "layers.py", "setup.py", "utils.py",
            "vis.py"
        ]
        utils.create_zip_code_files(os.path.join(submissions_dir, "code.zip"),
                                    submission_files)
        print("Done.")
Example No. 2
parser.add_argument('--log_dir',
                    help='log storage dir for tensorboard')
opt = parser.parse_args()

with tf.Session() as sess:

    # define resnet model
    sample = create_test_dataloader(data_root=opt.data_root,
                                    batch_size=opt.batch_size)
    with tf.variable_scope('model'):
        model = resnet_model.Model()
        p3d_out_norm = model(sample['image'], training=False)
    p3d_out = unnormalize_pose(p3d_out_norm)
    p3d_out = tf.reshape(p3d_out, [-1, 51])

    # restore weights
    saver = tf.train.Saver()
    saver.restore(sess, tf.train.latest_checkpoint(opt.log_dir))

    predictions = None
    with trange(math.ceil(meta_info.NUM_SAMPLES_TEST / opt.batch_size)) as t:
        for i in t:
            p3d_out_ = sess.run(p3d_out)

            if predictions is None:
                predictions = p3d_out_
            else:
                predictions = np.concatenate([predictions, p3d_out_], axis=0)

    generate_submission(predictions, 'submission.csv.gz')
    create_zip_code_files('code.zip')
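
Growing predictions with np.concatenate inside the loop copies the whole array on every iteration. A sketch of the same accumulation with a Python list and a single concatenate at the end (sess, p3d_out, opt, and meta_info as in the snippet above; the final trim only matters if the input pipeline pads the last batch):

batches = []
with trange(math.ceil(meta_info.NUM_SAMPLES_TEST / opt.batch_size)) as t:
    for _ in t:
        batches.append(sess.run(p3d_out))
predictions = np.concatenate(batches, axis=0)
predictions = predictions[:meta_info.NUM_SAMPLES_TEST]  # drop padded rows, if any
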
Example No. 3
                            predictions = np.concatenate(
                                [predictions, p3d_out_value], axis=0)

                predictions = predictions.reshape([-1, 51])
                print("\nPredictions shape:", predictions.shape)
                sys.stdout.flush()

                submissions_dir = os.path.join(LOG_PATH, "submissions")
                if not os.path.exists(submissions_dir):
                    os.makedirs(submissions_dir)
                utils.generate_submission_3d(
                    predictions,
                    os.path.join(
                        submissions_dir,
                        "submission_{}.csv.gz".format(global_step_val)))

                submission_files = [
                    "data.py", "hourglass3D_model.py", "test_hourglass3D.py",
                    "train_hourglass3D.py", "layers.py", "setup.py",
                    "utils.py", "vis.py"
                ]
                utils.create_zip_code_files(
                    os.path.join(submissions_dir,
                                 "code_{}.zip".format(global_step_val)),
                    submission_files)

    global_step_val = sess.run(global_step)  # get the global step value
    saver.save(sess,
               os.path.join(CHECKPOINTS_PATH, "model"),
               global_step=global_step_val)  # save at the end of training
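
For reference, saving with global_step as above produces checkpoint files named "model-<step>" under CHECKPOINTS_PATH, and the test-time snippets in the other examples pick the newest one up again with tf.train.latest_checkpoint. A short sketch with the same names:

saver = tf.train.Saver()
ckpt = tf.train.latest_checkpoint(CHECKPOINTS_PATH)  # resolves to ".../model-<step>"
saver.restore(sess, ckpt)
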
Example No. 4
import tensorflow as tf
import numpy as np
import os
import math

# Global constants
BATCH_SIZE = 1  # needs to be 1!
DATA_PATH = "/cluster/project/infk/hilliges/lectures/mp19/project2/"
SAVE_PATH = "./submitted_weights"

config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.gpu_options.visible_device_list = "0"
with tf.Session(config=config) as sess:

    testGenerator = DataGenerator(DATA_PATH, batch_size=BATCH_SIZE, name="test")
    model = get_model(batch_size=BATCH_SIZE, train=False)
    # Load weights into the new model
    latest = tf.train.latest_checkpoint(SAVE_PATH)
    print(latest)
    model.load_weights(latest)
    # predict 3d pose
    model.compile(optimizer='adam', loss=tf.losses.mean_squared_error)
    p3d_out = model.predict_generator(testGenerator, verbose=1)

    # un-normalize the predicted 3D poses
    p3d_out = unnormalize_pose_numpy(p3d_out, 0, 1100 * 2)

    generate_submission(p3d_out, "submission.csv.gz")

    create_zip_code_files("code.zip")
Example No. 5
    all_heatmaps_pred, p3d_pred = model(im, training=False)

    # restore weights
    print("Restoring latest model from {}\n".format(RESTORE_PATH))
    saver = tf.train.Saver()
    saver.restore(sess, tf.train.latest_checkpoint(RESTORE_PATH))

    predictions = None
    with trange(NUM_SAMPLES) as t:  # generate predictions for all images
        for i in t:
            image, p3d_out_value = sess.run([im, p3d_pred])

            #            image = ((image[0]+1)*128.0).transpose(1,2,0).astype("uint8") # unnormalize, put in channels_last format and cast to uint8
            #            save_dir = os.path.join(os.getcwd(), "test_samples")
            #            utils.save_p3d_image(image, None, p3d_out_value[0], save_dir, i+1)

            if predictions is None:
                predictions = p3d_out_value
            else:
                predictions = np.concatenate([predictions, p3d_out_value],
                                             axis=0)

    predictions = predictions.reshape([-1, 51])  # 17 joints x 3 coordinates per sample
    print("Predictions shape:", predictions.shape)
    utils.generate_submission_3d(predictions, "submission.csv.gz")
    submission_files = [
        "data.py", "hourglass3D_model.py", "test_hourglass3D.py",
        "train_hourglass3D.py", "layers.py", "setup.py", "utils.py", "vis.py"
    ]
    utils.create_zip_code_files("code.zip", submission_files)
Example No. 6
merged_preds = None
n = 0
for f in list_of_files:
    preds = pd.read_csv(f, header=0, index_col=0).values
    if merged_preds is None:
        merged_preds = preds
    else:
        merged_preds = merged_preds + preds
    n += 1

merged_preds = merged_preds / n  # element-wise average over all prediction files
print("Done.")

print("Saving merged 3D predictions and code to '{}'".format(OUT_PATH))

if not os.path.exists(OUT_PATH):
    os.makedirs(OUT_PATH)

utils.generate_submission_3d(
    merged_preds, os.path.join(OUT_PATH, "submission_merged.csv.gz"))

submission_files = [
    "data.py", "hourglass2D_model.py", "hourglass3D_model.py",
    "linear_model.py", "merge_predictions.py", "test_hourglass2D.py",
    "test_hourglass3D.py", "test_simple_baseline.py", "train_hourglass2D.py",
    "train_hourglass3D.py", "train_simple_baseline.py", "layers.py",
    "setup.py", "utils.py", "vis.py"
]
utils.create_zip_code_files(os.path.join(OUT_PATH, "code.zip"),
                            submission_files)

print("Done.")