Example #1
import os

os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1"  # Hide unnecessary TF messages
# Set the env var before anything imports TensorFlow (the human package
# pulls it in), otherwise the setting has no effect

import numpy as np  # noqa: E402
import tensorflow as tf  # noqa: E402

from human.model.human import HuMAn  # noqa: E402
from human.utils import dataset  # noqa: E402

SAVES = ["train_universal", "train_bmlhandball", "transfer_bmlhandball"]

if __name__ == "__main__":
    # Path where the TFRecords are located
    tfr_home = "../../AMASS/tfrecords"
    # Path where the saved models are located
    saves_home = "../training/saves"
    # The HuMAn neural network
    model = HuMAn()
    # Create groups of saved models and evaluation datasets
    saves = []
    datasets = []
    tags = []
    # Model: universal / Dataset: validation
    saves.append([os.path.join(saves_home, "train_universal")])
    datasets.append(
        [dataset.folder_to_dataset(os.path.join(tfr_home, "valid_256"))])
    tags.append("universal_valid")
    # Model: universal / Dataset: BML
    saves.append([os.path.join(saves_home, "train_universal")])
    datasets.append(
        [dataset.folder_to_dataset(os.path.join(tfr_home, "BMLhandball_256"))])
    tags.append("universal_bml")
    # Model: train BML / Dataset: validation
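The excerpt is cut off above; the remaining model/dataset pairs presumably follow the same append pattern for the other entries in SAVES. A minimal sketch of how such parallel (saves, datasets, tags) lists might then be consumed, with print standing in for whatever metric routine the project actually runs:

    # Illustrative only: iterate the parallel lists built above
    for save_group, ds_group, tag in zip(saves, datasets, tags):
        for save_path, ds in zip(save_group, ds_group):
            model.load_weights(save_path)  # restore the saved weights
            # ...run the model over `ds` and accumulate metrics here...
            print(f"Evaluated {save_path} ({tag})")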
Example #2
# (This excerpt omits its imports, which match Example #1; SAMPLING_FRAMES
# is a dict defined earlier in the script.)
# Create a list to store all possible horizon-frame values
HORIZON_FRAMES = []
# Fill the list (only the dict values are needed here)
for value in SAMPLING_FRAMES.values():
    HORIZON_FRAMES.extend(list(value))
# Turn the list into a set, removing repeated values, then into a NumPy array
HORIZON_FRAMES = np.array(list(set(HORIZON_FRAMES)))

if __name__ == "__main__":
    # Path where the TFRecords are located
    tfr_home = "../../AMASS/tfrecords"
    # Load the validation dataset
    parsed_ds = dataset.folder_to_dataset(os.path.join(tfr_home, "valid_256"))
    # Load the HuMAn neural network
    # Expects a normalization layer already adapted
    model = HuMAn()
    # Load weights from saved model
    saves_path = "../training/saves/train_universal"
    model.load_weights(saves_path)
    # Split the maximum horizon time (0.5 seconds) into bins
    n_bins = 3
    bin_limits = [0, 0.5 / 3, 2 * 0.5 / 3, 0.5]
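    # (Equivalently, the limits are np.linspace(0, 0.5, n_bins + 1):
    #  thirds of the 0.5 s maximum horizon.)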
    # Create lists to store the absolute error values into bins
    err_bins = [[] for _ in range(n_bins)]
    # Iterate through all specified horizon frames
    for horizon_frames in HORIZON_FRAMES:
        # Load validation data
        # Bind the loop variable via a default argument, the idiomatic
        # guard against Python's late-binding of closures in loops
        mapped_ds = parsed_ds.map(
            lambda x, hf=horizon_frames: dataset.map_dataset(
                x, skeleton="full_body", horizon_frames=hf),
            num_parallel_calls=tf.data.AUTOTUNE,
            deterministic=True)
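The loop body is truncated here. As a minimal, self-contained sketch of how the per-frame absolute errors might be routed into the three horizon bins (the variable names below are illustrative stand-ins, not the project's):

import numpy as np

bin_limits = [0, 0.5 / 3, 2 * 0.5 / 3, 0.5]
err_bins = [[] for _ in range(3)]
# Stand-ins for values the real loop would derive from the mapped dataset
# and the model's predictions
horizon_times = np.array([0.05, 0.20, 0.45])  # seconds into the future
abs_err = np.array([0.010, 0.030, 0.080])     # per-frame absolute error
# digitize with right=True maps each horizon time to bin index 0, 1 or 2
idx = np.digitize(horizon_times, bin_limits[1:], right=True)
for i, e in zip(idx, abs_err):
    err_bins[min(i, 2)].append(e)  # clamp guards against t > 0.5
print([len(b) for b in err_bins])  # -> [1, 1, 1]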
Example #3
# (This excerpt omits its imports: numpy as np, matplotlib.pyplot as plt and
# tensorflow as tf, plus dataset and HuMAn as in Example #1; SHUFFLE_BUFFER
# is a constant defined earlier in the script.)
if __name__ == "__main__":
    # Path where the TFRecords are located
    tfr_home = "../../AMASS/tfrecords"
    # Load the testing dataset
    parsed_ds = dataset.load_splits(tfr_home, splits=["test_1024"])
    # Load the test data
    test_ds = (parsed_ds["test_1024"]
               .map(dataset.map_test,
                    num_parallel_calls=tf.data.AUTOTUNE,
                    deterministic=False)
               .shuffle(SHUFFLE_BUFFER)
               .batch(1)
               .prefetch(tf.data.AUTOTUNE))
    test_iter = iter(test_ds)
    # The HuMAn neural network
    # Expects a normalization layer already adapted during training
    model = HuMAn()
    # Load weights from checkpoints
    ckpt_path = "checkpoints"
    latest_ckpt = tf.train.latest_checkpoint(ckpt_path)
    model.load_weights(latest_ckpt)
    # Make a prediction
    inputs, pose_target, aux = next(test_iter)
    prediction = model.predict(inputs)
    framerate = np.round(1 / aux["dt"].numpy().item(), decimals=1)
    horizon = np.round(inputs["horizon_input"][0, 0].numpy().item(),
                       decimals=4)
    print(f"Input framerate is {framerate} Hz.")
    print(f"Prediction horizon is {horizon} s.")

    fig, axs = plt.subplots(3, 1)
    axs[0].plot(prediction[0])
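The plotting code is cut off at this point. A minimal sketch of how the three panels might be filled in (the panel contents are assumptions, not confirmed by the excerpt):

    axs[0].set_title("Predicted joint rotations")
    axs[1].plot(pose_target[0].numpy())
    axs[1].set_title("Ground-truth joint rotations")
    axs[2].plot(np.abs(prediction[0] - pose_target[0].numpy()))
    axs[2].set_title("Absolute prediction error")
    fig.tight_layout()
    plt.show()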
Example #4
File: train.py Project: Vtn21/HuMAn
else:
    if args.p == "train":
        cfg = train_universal
    else:
        print("The universal dataset does not support transfer learning.")
        sys.exit()
# Build a dataset for adapting the normalization layer
norm_folder = os.path.join(common.tfr_home, "train_256")
norm_ds = (dataset.folder_to_dataset(norm_folder)
           .map(dataset.map_pose_input,
                num_parallel_calls=tf.data.AUTOTUNE,
                deterministic=False)
           .batch(cfg.batch_size[-1])
           .prefetch(tf.data.AUTOTUNE))
# Instantiate the HuMAn neural network
model = HuMAn(norm_dataset=norm_ds)
if args.d == "universal":
    # Create one training and one validation dataset per sequence length
    train_datasets = []
    valid_datasets = []
    for seq_len in cfg.seq_len:
        train_folder = os.path.join(common.tfr_home, f"train_{seq_len}")
        train_datasets.append(dataset.folder_to_dataset(train_folder))
        valid_folder = os.path.join(common.tfr_home, f"valid_{seq_len}")
        valid_datasets.append(dataset.folder_to_dataset(valid_folder))
    # Run the full training loop once, after all datasets are built
    full_training_loop(model,
                       train_datasets=train_datasets,
                       valid_datasets=valid_datasets,
                       seq_lengths=cfg.seq_len,
                       batch_sizes=cfg.batch_size,
                       swa=cfg.swa,
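The call is cut off above. For context, a hypothetical reconstruction of the argument parsing this excerpt implies (the actual flags in train.py may differ):

import argparse

parser = argparse.ArgumentParser(
    description="Train the HuMAn model or run transfer learning")
parser.add_argument("-d", choices=["universal", "bmlhandball"],
                    help="dataset to use")
parser.add_argument("-p", choices=["train", "transfer"],
                    help="procedure: train from scratch or transfer learning")
args = parser.parse_args()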