Example #1
                    help='save log')
# TO ADD: save_result

args = parser.parse_args()
device = 'cuda:0'
randomized = False

model_data_path = os.path.join(
    model_path, dataset + '_drop_' + str(args.frame_drop) + '_no_aff')
if not os.path.exists(model_data_path):
    os.mkdir(model_data_path)
args.work_dir = model_data_path

# if dataset == 'edin':
data_dict, [data_dict_train, data_dict_valid] = \
    loader.load_edin_data(data_path, num_labels,
                          frame_drop=args.frame_drop, add_mirrored=args.add_mirrored, randomized=randomized)
# data_dict['affective_features'], affs_max, affs_min = loader.scale_data(data_dict['affective_features'])
print('Data points for training:\t{}'.format(len(data_dict_train)))
print('Data points for validation:\t{}'.format(len(data_dict_valid)))
print('Total:\t\t\t\t\t\t{}'.format(len(data_dict)))
num_frames = data_dict['0']['positions'].shape[0]
joints_dict = data_dict['0']['joints_dict']
joint_names = joints_dict['joint_names']
joint_offsets = joints_dict['joint_offsets_all']
joint_parents = joints_dict['joint_parents']
num_joints = len(joint_parents)
coords = data_dict['0']['positions'].shape[-1]
data_loader = dict(train=data_dict_train, test=data_dict)
# elif dataset == 'cmu':
#     data_dict, num_frames = loader.load_cmu_data(data_path,
#                                                  joints_to_model=joints_to_model,
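The data_dict returned above carries both motion data and a skeleton description per sample. Below is a minimal sketch of inspecting one sample, using only the keys shown in Example #1 (positions, joints_dict, joint_names, joint_parents); the "negative parent index means root" convention is an assumption, not something confirmed by the taew code:

# Illustrative only: inspect the first sample loaded by loader.load_edin_data above.
sample = data_dict['0']
positions = sample['positions']        # shape[0] = num_frames, shape[-1] = coords (per Example #1)
joints_dict = sample['joints_dict']
joint_names = joints_dict['joint_names']
joint_parents = joints_dict['joint_parents']
for idx, name in enumerate(joint_names):
    parent = joint_parents[idx]
    # Assumption: a negative parent index marks the root joint.
    parent_name = joint_names[parent] if parent >= 0 else '(root)'
    print('{:2d}  {:20s} parent: {}'.format(idx, name, parent_name))
print('positions:', positions.shape)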
Example #2
File: main.py  Project: emotionwalk/taew
    type=int,
    default=5,
    metavar='FD',
    help='frame downsample rate (default: 5)')  # CHANGE THIS, TRY WITH 3 and 5
# TO ADD: save_result

args = parser.parse_args()
device = 'cuda:0'

num_joints = 21
num_labels = [4, 3, 3]
num_classes = 4

data, labels, [data_train, data_test, labels_train, labels_test] =\
        loader.load_edin_data(
            'datasets/data_edin_locomotion_pose_diff_aff_drop_{}.npz'.format(args.frame_drop),
            'datasets/labels_edin_locomotion', num_labels)
graph_dict = {'strategy': 'spatial'}

if args.train:
    X_train, X_val = torch.from_numpy(data_train).cuda(), torch.from_numpy(
        data_test).cuda()
    Y_train, Y_val = torch.from_numpy(labels_train).cuda(), torch.from_numpy(
        labels_test).cuda()

    train_set = TensorDataset(X_train, Y_train)
    val_set = TensorDataset(X_val, Y_val)

    train_loader = DataLoader(train_set, batch_size=128)
    val_loader = DataLoader(val_set, batch_size=128)
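The loaders above wrap in-memory CUDA tensors, so each batch is already on the GPU. A short sketch of iterating them to confirm batch shapes before attaching a model; nothing below is taken from main.py, and the model step is only indicated in a comment:

# Illustrative only: one pass over the train_loader / val_loader defined above.
for batch_idx, (x, y) in enumerate(train_loader):
    if batch_idx == 0:
        print('train batch data:  ', tuple(x.shape))
        print('train batch labels:', tuple(y.shape))
    # a forward/backward step would go here, e.g.
    #   out = model(x); loss = criterion(out, y); loss.backward(); optimizer.step()
for x, y in val_loader:
    pass  # evaluation would mirror the loop above inside torch.no_grad()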
Example #3
                    help='path to save model')
# TO ADD: save_result

args = parser.parse_args()
device = 'cuda:0'

if dataset == 'ewalk':
    [data_train, data_test, poses_train, poses_test, rotations_train, rotations_test,
     translations_train, translations_test, affective_features_train, affective_features_test,
     num_frames_train, num_frames_test, labels_train, labels_test], data_max, data_min =\
        loader.load_ewalk_data(data_path, coords, num_joints, upsample=upsample)
elif dataset == 'edin':
    [data_train, data_test, poses_train, poses_test, rotations_train, rotations_test,
     translations_train, translations_test, affective_features_train, affective_features_test,
     num_frames_train, num_frames_test, labels_train, labels_test], label_weights =\
        loader.load_edin_data(data_path, coords, num_joints, num_labels, frame_drop=args.frame_drop)
diffs_dim = int(rotations_train.shape[-1] / num_joints)
affs_dim = affective_features_train.shape[-1] + deep_dim
affective_features = np.concatenate(
    (affective_features_train, affective_features_test), axis=0)
affective_features, affs_max, affs_min = loader.scale_data(affective_features)
affective_features_train, _, _ = loader.scale_data(affective_features_train,
                                                   affs_max, affs_min)
affective_features_test, _, _ = loader.scale_data(affective_features_test,
                                                  affs_max, affs_min)
num_frames_max = rotations_train.shape[1]
num_frames_out = num_frames_max - 1
num_frames_train_norm = num_frames_train / num_frames_max
num_frames_test_norm = num_frames_test / num_frames_max
data_loader = list()
data_loader.append(
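Example #3 calls loader.scale_data twice: once to fit the range (returning affs_max, affs_min) and once per split to reuse that range. Below is a hypothetical stand-in with the same calling convention, assuming per-feature min-max scaling; the actual taew implementation may differ:

import numpy as np

def scale_data_sketch(features, data_max=None, data_min=None):
    # Hypothetical: per-feature min-max scaling to [-1, 1],
    # reusing (data_max, data_min) when they are supplied.
    if data_max is None:
        data_max = np.max(features, axis=0)
    if data_min is None:
        data_min = np.min(features, axis=0)
    scaled = 2. * (features - data_min) / (data_max - data_min + 1e-8) - 1.
    return scaled, data_max, data_min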