Example #1
    mkdir(logdir)
    # Redirect stdout to a log file inside the experiment directory.
    sys.stdout = open(str(logdir) + '/log.txt', 'w')

cfg = ModelConfig(mpii_dataconf.input_shape, pa17j3d, num_pyramids=8,
        action_pyramids=[], num_levels=4)

num_predictions = spnet.get_num_predictions(cfg.num_pyramids, cfg.num_levels)

start_lr = 0.001
weights_path = os.path.join(logdir, 'weights_posebaseline_{epoch:03d}.hdf5')

batch_size_mpii = 14
batch_size_ar = 2

"""Load datasets"""
mpii = MpiiSinglePerson(datasetpath('MPII'), dataconf=mpii_dataconf,
        poselayout=pa17j3d)

# h36m = Human36M(datasetpath('Human3.6M'), dataconf=human36m_dataconf,
#         poselayout=pa17j3d, topology='frames')

penn_sf = PennAction(datasetpath('Penn_Action'), pennaction_dataconf,
        poselayout=pa17j3d, topology='frames', use_gt_bbox=True)

ntu_sf = Ntu(datasetpath('NTU'), ntu_pe_dataconf, poselayout=pa17j3d,
        topology='frames', use_gt_bbox=True)

"""Create an object to load data from all datasets."""
data_tr = BatchLoader([mpii, penn_sf, ntu_sf], ['frame'], ['pose'],
        TRAIN_MODE, batch_size=[batch_size_mpii, batch_size_ar,
            batch_size_ar], num_predictions=num_predictions, shuffle=True)
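
# BatchLoader draws `batch_size_mpii` samples from MPII and `batch_size_ar`
# from each action dataset per batch, and replicates the ground truth
# `num_predictions` times because the network is supervised at every
# pyramid/level output. Below is a minimal, self-contained sketch of that
# replication pattern with a generic keras Sequence; ToyPoseLoader and its
# shapes are hypothetical, not deephar's BatchLoader.
import numpy as np
from tensorflow.keras.utils import Sequence

class ToyPoseLoader(Sequence):
    def __init__(self, frames, poses, batch_size, num_predictions):
        self.frames = frames    # (num_samples, H, W, 3)
        self.poses = poses      # (num_samples, num_joints, dim)
        self.batch_size = batch_size
        self.num_predictions = num_predictions

    def __len__(self):
        return int(np.ceil(len(self.frames) / self.batch_size))

    def __getitem__(self, idx):
        i = idx * self.batch_size
        x = self.frames[i:i + self.batch_size]
        y = self.poses[i:i + self.batch_size]
        # One copy of the labels per supervised output of the model.
        return [x], [y] * self.num_predictions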
Example #2
weights_path = get_file(weights_file,
                        TF_WEIGHTS_PATH,
                        file_hash=md5_hash,
                        cache_subdir='models')
"""Load pre-trained model."""
model.load_weights(weights_path)
"""Merge pose and visibility as a single output."""
outputs = []
for b in range(len(model.outputs) // 2):
    outputs.append(
        concatenate([model.outputs[2 * b], model.outputs[2 * b + 1]],
                    name='blk%d' % (b + 1)))
model = Model(model.input, outputs, name=model.name)
logger.debug("DATASETS")
"""Load the MPII dataset."""
mpii = MpiiSinglePerson('datasets/MPII', dataconf=mpii_sp_dataconf)
logger.debug(mpii.dataset_path)
"""Pre-load validation samples and generate the eval. callback."""
mpii_val = BatchLoader(mpii,
                       x_dictkeys=['frame'],
                       y_dictkeys=['pose', 'afmat', 'headsize'],
                       mode=VALID_MODE,
                       batch_size=mpii.get_length(VALID_MODE),
                       num_predictions=1,
                       shuffle=False)
logger.debug(mpii_val.datasets)

printcn(OKBLUE, 'Pre-loading MPII validation data...')
[x_val], [p_val, afmat_val, head_val] = mpii_val[0]
pose_pred = np.zeros_like(p_val)
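
# The merge loop above pairs each block's (pose, visibility) outputs and
# concatenates them along the last axis. A self-contained sketch of the
# same pattern on a toy two-block model (layer sizes are illustrative only):
from tensorflow.keras.layers import Input, Dense, Reshape, concatenate
from tensorflow.keras.models import Model

inp = Input(shape=(64,))
toy_outputs = []
for b in range(2):
    pose = Reshape((16, 2))(Dense(32)(inp))                       # (joints, x/y)
    vis = Reshape((16, 1))(Dense(16, activation='sigmoid')(inp))  # (joints, v)
    toy_outputs += [pose, vis]
toy = Model(inp, toy_outputs)

merged = [concatenate([toy.outputs[2 * b], toy.outputs[2 * b + 1]],
                      name='blk%d' % (b + 1))
          for b in range(len(toy.outputs) // 2)]
toy = Model(toy.input, merged)  # each output is now (joints, x/y/v)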
Example #3
num_joints = 16

model = reception.build(input_shape, num_joints, dim=2,
        num_blocks=num_blocks, num_context_per_joint=2, ksize=(5, 5),
        concat_pose_confidence=False)

"""Load pre-trained model."""
# weights_path = get_file(weights_file, TF_WEIGHTS_PATH, md5_hash=md5_hash, cache_subdir='models')
model.load_weights(weights_path)

"""Merge pose and visibility as a single output."""
outputs = []
for b in range(len(model.outputs) // 2):
    outputs.append(concatenate([model.outputs[2 * b], model.outputs[2 * b + 1]],
        name='blk%d' % (b + 1)))
model = Model(model.input, outputs, name=model.name)

"""Load the MPII dataset."""
mpii = MpiiSinglePerson(f"{dataset_path}datasets/MPII", dataconf=mpii_sp_dataconf)

"""Pre-load validation samples and generate the eval. callback."""
mpii_val = BatchLoader(mpii, x_dictkeys=['frame'],
        y_dictkeys=['pose', 'afmat', 'headsize'], mode=VALID_MODE,
        batch_size=mpii.get_length(VALID_MODE), num_predictions=1,
        shuffle=False)
printcn(OKBLUE, 'Pre-loading MPII validation data...')
[x_val], [p_val, afmat_val, head_val] = mpii_val[0]

eval_singleperson_pckh(model, x_val, p_val[:, :, 0:2], afmat_val, head_val)
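
# eval_singleperson_pckh scores the predictions with the PCKh metric: a
# joint counts as correct when its distance to the ground truth is within
# a fraction (0.5 by convention) of the annotated head segment length.
# A numpy sketch of the metric itself, not deephar's implementation
# (which also maps predictions back to image space via `afmat`):
import numpy as np

def pckh(pred, gt, head_size, threshold=0.5):
    """pred, gt: (num_samples, num_joints, 2); head_size: (num_samples,)."""
    dist = np.linalg.norm(pred - gt, axis=-1)            # (samples, joints)
    hits = dist <= threshold * head_size[:, np.newaxis]  # per-joint matches
    return hits.mean()                                   # fraction correct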

Example #4
                  pa17j3d,
                  num_pyramids=8,
                  action_pyramids=[],
                  num_levels=4)

num_predictions = spnet.get_num_predictions(cfg.num_pyramids, cfg.num_levels)

start_lr = 0.001
weights_path = os.path.join(
    "E:\\Bachelorarbeit-SS20\\weights\\deephar\\output\\ntu_baseline\\0603",
    'weights_posebaseline_{epoch:03d}.hdf5')

batch_size_mpii = 10
batch_size_ar = 2
"""Load datasets"""
mpii = MpiiSinglePerson("D:\\MPII", dataconf=mpii_dataconf, poselayout=pa17j3d)

h36m = Human36M("B:\\Human3.6M",
                dataconf=human36m_dataconf,
                poselayout=pa17j3d,
                topology='frames')

penn_sf = PennAction("D:\\PennAction",
                     pennaction_dataconf,
                     poselayout=pa17j3d,
                     topology='frames',
                     use_gt_bbox=True)

ntu_sf = Ntu("E:\\Bachelorarbeit-SS20\\datasets\\NTU",
             ntu_pe_dataconf,
             poselayout=pa17j3d,
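
# The hard-coded drive letters above tie the script to one machine. A
# small sketch of collecting them in one place with pathlib (the dict and
# its keys are made up for illustration; forward slashes also work on
# Windows, and Path normalizes them):
from pathlib import Path

DATA_ROOTS = {
    'mpii': Path('D:/MPII'),
    'h36m': Path('B:/Human3.6M'),
    'penn': Path('D:/PennAction'),
    'ntu': Path('E:/Bachelorarbeit-SS20/datasets/NTU'),
}
# The dataset constructors above would then take e.g. str(DATA_ROOTS['mpii']).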
Example #5
                  num_actions=[15],
                  num_pyramids=6,
                  action_pyramids=[5, 6],
                  num_levels=4,
                  pose_replica=True,
                  num_pose_features=160,
                  num_visual_features=160)

num_predictions = spnet.get_num_predictions(cfg.num_pyramids, cfg.num_levels)
num_action_predictions = \
        spnet.get_num_predictions(len(cfg.action_pyramids), cfg.num_levels)

logger.info("Loading Datasets")
"""Load datasets"""
mpii = MpiiSinglePerson(os.getcwd() + '/datasets/MPII',
                        dataconf=mpii_dataconf,
                        poselayout=pa16j2d)
logger.info("MPII Loaded")

# Check file with bounding boxes
penn_data_path = os.getcwd() + '/datasets/PennAction'
penn_bbox_file = 'penn_pred_bboxes_multitask.json'

if not os.path.isfile(os.path.join(penn_data_path, penn_bbox_file)):
    logger.error(
        f'Error: file {penn_bbox_file} not found in {penn_data_path}!')
    logger.error(
        'Please download it from https://drive.google.com/file/d/1qXpEKF0d9KxmQdd2_QSIA1c3WGj1D3Y3/view?usp=sharing'
    )
    sys.stdout.flush()
    sys.exit(1)
Example #6
"""Architecture configuration."""
num_blocks = 8
batch_size = 24
input_shape = mpii_sp_dataconf.input_shape
num_joints = 16

print("***********************")
print(input_shape)
model = reception.build(input_shape,
                        num_joints,
                        dim=2,
                        num_blocks=num_blocks,
                        num_context_per_joint=2,
                        ksize=(5, 5))
"""Load the MPII dataset."""
mpii = MpiiSinglePerson('datasets/MPII', dataconf=mpii_sp_dataconf)

data_tr = BatchLoader(mpii, ['frame'], ['pose'],
                      TRAIN_MODE,
                      batch_size=batch_size,
                      num_predictions=num_blocks,
                      shuffle=True)
"""Pre-load validation samples and generate the eval. callback."""
mpii_val = BatchLoader(mpii,
                       x_dictkeys=['frame'],
                       y_dictkeys=['pose', 'afmat', 'headsize'],
                       mode=VALID_MODE,
                       batch_size=mpii.get_length(VALID_MODE),
                       num_predictions=1,
                       shuffle=False)
printcn(OKBLUE, 'Pre-loading MPII validation data...')
Example #7
import os
import sys

from deephar.config import mpii_sp_dataconf

from deephar.data import MpiiSinglePerson
from deephar.data import BatchLoader

from deephar.utils import *

sys.path.append(os.path.join(os.getcwd(), 'exp/common'))
from mpii_tools import eval_singleperson_pckh

sys.path.append(os.path.join(os.getcwd(), 'datasets'))
import annothelper

annothelper.check_mpii_dataset()
"""Load the MPII dataset."""
mpii = MpiiSinglePerson('datasets/MPII', dataconf=mpii_sp_dataconf)
"""Pre-load validation samples and generate the eval. callback."""

mpii_val = BatchLoader(
    mpii,
    x_dictkeys=['frame'],
    y_dictkeys=['pose'],
    mode=TRAIN_MODE,
    # batch_size=mpii.get_length(VALID_MODE), num_predictions=1,
    batch_size=1,
    num_predictions=1,
    shuffle=False)
printcn(OKBLUE, 'Pre-loading MPII validation data...')
# img 256x256
[x_val], [p_val] = mpii_val[10]
print(p_val)
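
# A quick way to eyeball the sample fetched above: draw the joints over
# the 256x256 frame with matplotlib. This assumes the pose coordinates
# are normalized to [0, 1] (hence the scaling by the image size); drop
# the scaling if your pose layout already stores pixel coordinates.
import matplotlib.pyplot as plt

frame = x_val[0]   # (256, 256, 3)
pose = p_val[0]    # (num_joints, dim)
h, w = frame.shape[:2]

plt.imshow(frame)
plt.scatter(pose[:, 0] * w, pose[:, 1] * h, c='r', s=12)
plt.show()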
Example #8
    num_pose_features=192,
    num_visual_features=192)

num_predictions = spnet.get_num_predictions(cfg.num_pyramids, cfg.num_levels)
num_action_predictions = \
        spnet.get_num_predictions(len(cfg.action_pyramids), cfg.num_levels)

start_lr = 0.01
action_weight = 0.1
batch_size_mpii = 3
batch_size_h36m = 4
batch_size_ntu = 8  #1
batch_clips = 4  # 8/4
"""Load datasets"""
mpii = MpiiSinglePerson(datasetpath('MPII'),
                        dataconf=mpii_dataconf,
                        poselayout=pa17j3d)

# h36m = Human36M(datasetpath('Human3.6M'), dataconf=human36m_dataconf,
# poselayout=pa17j3d, topology='frames')

ntu_sf = Ntu(datasetpath('NTU'),
             ntu_pe_dataconf,
             poselayout=pa17j3d,
             topology='frames',
             use_gt_bbox=True)

ntu = Ntu(datasetpath('NTU'),
          ntu_dataconf,
          poselayout=pa17j3d,
          topology='sequences',
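
# With topology='sequences', Ntu yields fixed-length clips of frames
# rather than independent frames. A tiny numpy sketch of the underlying
# idea of cutting a fixed-size clip out of a longer sequence (sample_clip
# is made up for illustration, not deephar's implementation):
import numpy as np

def sample_clip(frames, clip_size, rng=np.random):
    """frames: (num_frames, H, W, 3) -> (clip_size, H, W, 3)."""
    start = rng.randint(0, max(1, len(frames) - clip_size + 1))
    idx = np.arange(start, start + clip_size)
    idx = np.clip(idx, 0, len(frames) - 1)  # pad short videos by repeating
    return frames[idx]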
Example #9
    num_pose_features=192,
    num_visual_features=192)

num_predictions = spnet.get_num_predictions(cfg.num_pyramids, cfg.num_levels)
num_action_predictions = \
        spnet.get_num_predictions(len(cfg.action_pyramids), cfg.num_levels)

start_lr = 0.01
action_weight = 0.1
batch_size_mpii = 3
#batch_size_h36m = 4
batch_size_ntu = 6  #1
batch_clips = 3  # 8/4
"""Load datasets"""
mpii = MpiiSinglePerson("E:\\Bachelorarbeit-SS20\\datasets\\MPII",
                        dataconf=mpii_dataconf,
                        poselayout=pa17j3d)

# h36m = Human36M(datasetpath('Human3.6M'), dataconf=human36m_dataconf,
# poselayout=pa17j3d, topology='frames')

ntu_sf = Ntu("E:\\Bachelorarbeit-SS20\\datasets\\NTU",
             ntu_pe_dataconf,
             poselayout=pa17j3d,
             topology='frames',
             use_gt_bbox=True)

ntu = Ntu("E:\\Bachelorarbeit-SS20\\datasets\\NTU",
          ntu_dataconf,
          poselayout=pa17j3d,
          topology='sequences',
Example #10
    sys.stdout = open(str(logdir) + '/log.txt', 'w')

weights_file = os.path.join(logdir, 'weights_mpii_{epoch:03d}.h5')

"""Architecture configuration."""
num_blocks = 8
batch_size = 24
input_shape = mpii_sp_dataconf.input_shape
num_joints = 16

model = reception.build(input_shape, num_joints, dim=2,
        num_blocks=num_blocks, num_context_per_joint=2, ksize=(5, 5))


"""Load the MPII dataset."""
mpii = MpiiSinglePerson('../../datasets/MPII', dataconf=mpii_sp_dataconf)

data_tr = BatchLoader(mpii, ['frame'], ['pose'], TRAIN_MODE,
        batch_size=batch_size, num_predictions=num_blocks, shuffle=True)

"""Pre-load validation samples and generate the eval. callback."""
mpii_val = BatchLoader(mpii, x_dictkeys=['frame'],
        y_dictkeys=['pose', 'afmat', 'headsize'], mode=VALID_MODE,
        batch_size=mpii.get_length(VALID_MODE), num_predictions=1,
        shuffle=False)
printcn(OKBLUE, 'Pre-loading MPII validation data...')
[x_val], [p_val, afmat_val, head_val] = mpii_val[0]
eval_callback = MpiiEvalCallback(x_val, p_val, afmat_val, head_val,
        eval_model=model, batch_size=2, pred_per_block=1, logdir=logdir)

loss = pose_regression_loss('l1l2bincross', 0.01)
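
# pose_regression_loss('l1l2bincross', 0.01) builds deephar's combined
# objective: per the name, L1 and L2 terms on the coordinates plus a
# binary cross-entropy term on joint visibility. A sketch of that style
# of loss (not deephar's exact implementation), assuming y[..., :-1]
# holds the coordinates, y[..., -1] a visibility flag, and that the
# second argument above weights the visibility term:
import tensorflow as tf

def l1l2bincross_sketch(vis_weight=0.01):
    def loss(y_true, y_pred):
        p_true, v_true = y_true[..., :-1], y_true[..., -1]
        p_pred, v_pred = y_pred[..., :-1], y_pred[..., -1]
        diff = p_true - p_pred
        pose_term = tf.reduce_mean(tf.abs(diff) + tf.square(diff))
        vis_term = tf.reduce_mean(
            tf.keras.losses.binary_crossentropy(v_true, v_pred))
        return pose_term + vis_weight * vis_term
    return loss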
Example #11
        num_levels=4, pose_replica=False,
        num_pose_features=192, num_visual_features=192)

num_predictions = spnet.get_num_predictions(cfg.num_pyramids, cfg.num_levels)
num_action_predictions = \
        spnet.get_num_predictions(len(cfg.action_pyramids), cfg.num_levels)

start_lr = 0.01
action_weight = 0.1
batch_size_mpii = 3
#batch_size_h36m = 4
batch_size_ntu = 6 #1
batch_clips = 3 # 8/4

"""Load datasets"""
mpii = MpiiSinglePerson("/home/ispl-ex39/Downloads/deephar-master/datasets/MPII", dataconf=mpii_dataconf,
        poselayout=pa17j3d)

# h36m = Human36M(datasetpath('Human3.6M'), dataconf=human36m_dataconf,
        # poselayout=pa17j3d, topology='frames')

ntu_sf = Ntu("/home/ispl-ex39/hdd_ext/hdd2000/NTU", ntu_pe_dataconf, poselayout=pa17j3d,
        topology='frames', use_gt_bbox=True)

ntu = Ntu("/home/ispl-ex39/hdd_ext/hdd2000/NTU", ntu_dataconf, poselayout=pa17j3d,
        topology='sequences', use_gt_bbox=True, clip_size=num_frames)

ntu_s1 = Ntu("/home/ispl-ex39/hdd_ext/hdd2000/NTU", ntu_dataconf, poselayout=pa17j3d,
        topology='sequences', use_gt_bbox=True, clip_size=num_frames)
        # topology='sequences', use_gt_bbox=True, clip_size=num_frames, num_S=1)

pe_data_tr = BatchLoader([ntu_sf], ['frame'], ['pose'], TRAIN_MODE,